Dataset columns:
- input: string (lengths 53 to 297k)
- output: string (604 distinct values)
- repo_name: string (376 distinct values)
- test_path: string (583 distinct values)
- code_path: string (lengths 7 to 116)
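The five columns above line up one-to-one in each row below: a source file ("input"), the test file written for it ("output"), and provenance fields. As a rough illustration of how a dataset with this schema could be consumed, here is a minimal sketch using the Hugging Face datasets library; the identifier "user/test-generation" is a placeholder, not the real name of this dataset.

import datasets

# Hypothetical identifier; substitute the actual dataset name.
ds = datasets.load_dataset("user/test-generation", split="train")

for row in ds.select(range(3)):
    # Each row pairs a source file ("input") with a test file ("output"),
    # plus provenance: repo_name, test_path, code_path.
    print(row["repo_name"], row["code_path"], "->", row["test_path"])
    print(row["input"][:80])  # first characters of the source file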
"""Provides device trigger for lights.""" from typing import List import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import toggle_entity from homeassistant.const import CONF_DOMAIN from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.typing import ConfigType from . import DOMAIN TRIGGER_SCHEMA = toggle_entity.TRIGGER_SCHEMA.extend( {vol.Required(CONF_DOMAIN): DOMAIN} ) async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Listen for state changes based on configuration.""" return await toggle_entity.async_attach_trigger( hass, config, action, automation_info ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers.""" return await toggle_entity.async_get_triggers(hass, device_id, DOMAIN) async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict: """List trigger capabilities.""" return await toggle_entity.async_get_trigger_capabilities(hass, config)
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
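Nearly every test above follows the same stacked-patch shape: return_value=True makes Kodi.ping succeed, while side_effect=SomeError makes it raise, steering the config flow into a specific error branch. A minimal self-contained sketch of that mechanism follows; the ping function and exception are local stand-ins, not the Kodi ones, and it should be run as a script so the "__main__.ping" patch target resolves.

from unittest.mock import patch

class CannotConnectError(Exception):
    """Local stand-in for the config-flow exception."""

def ping():
    """Stand-in for Kodi.ping; replaced entirely under patch."""
    raise NotImplementedError

# return_value: the patched call simply returns the given value.
with patch("__main__.ping", return_value=True):
    assert ping() is True

# side_effect with an exception class: the patched call raises it,
# which is how the tests drive the flow into its error branches.
with patch("__main__.ping", side_effect=CannotConnectError):
    try:
        ping()
    except CannotConnectError:
        print("raised as configured")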
repo_name: turbokongen/home-assistant
test_path: tests/components/kodi/test_config_flow.py
code_path: homeassistant/components/light/device_trigger.py
"""Support for switch controlled using a telnet connection.""" from datetime import timedelta import logging import telnetlib import voluptuous as vol from homeassistant.components.switch import ( ENTITY_ID_FORMAT, PLATFORM_SCHEMA, SwitchEntity, ) from homeassistant.const import ( CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_COMMAND_STATE, CONF_NAME, CONF_PORT, CONF_RESOURCE, CONF_SWITCHES, CONF_TIMEOUT, CONF_VALUE_TEMPLATE, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_PORT = 23 DEFAULT_TIMEOUT = 0.2 SWITCH_SCHEMA = vol.Schema( { vol.Required(CONF_COMMAND_OFF): cv.string, vol.Required(CONF_COMMAND_ON): cv.string, vol.Required(CONF_RESOURCE): cv.string, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_COMMAND_STATE): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(float), } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} ) SCAN_INTERVAL = timedelta(seconds=10) def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return switches controlled by telnet commands.""" devices = config.get(CONF_SWITCHES, {}) switches = [] for object_id, device_config in devices.items(): value_template = device_config.get(CONF_VALUE_TEMPLATE) if value_template is not None: value_template.hass = hass switches.append( TelnetSwitch( hass, object_id, device_config.get(CONF_RESOURCE), device_config.get(CONF_PORT), device_config.get(CONF_NAME, object_id), device_config.get(CONF_COMMAND_ON), device_config.get(CONF_COMMAND_OFF), device_config.get(CONF_COMMAND_STATE), value_template, device_config.get(CONF_TIMEOUT), ) ) if not switches: _LOGGER.error("No switches added") return add_entities(switches) class TelnetSwitch(SwitchEntity): """Representation of a switch that can be toggled using telnet commands.""" def __init__( self, hass, object_id, resource, port, friendly_name, command_on, command_off, command_state, value_template, timeout, ): """Initialize the switch.""" self._hass = hass self.entity_id = ENTITY_ID_FORMAT.format(object_id) self._resource = resource self._port = port self._name = friendly_name self._state = False self._command_on = command_on self._command_off = command_off self._command_state = command_state self._value_template = value_template self._timeout = timeout def _telnet_command(self, command): try: telnet = telnetlib.Telnet(self._resource, self._port) telnet.write(command.encode("ASCII") + b"\r") response = telnet.read_until(b"\r", timeout=self._timeout) _LOGGER.debug("telnet response: %s", response.decode("ASCII").strip()) return response.decode("ASCII").strip() except OSError as error: _LOGGER.error( 'Command "%s" failed with exception: %s', command, repr(error) ) return None @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """Only poll if we have state command.""" return self._command_state is not None @property def is_on(self): """Return true if device is on.""" return self._state @property def assumed_state(self): """Return true if no state command is defined, false otherwise.""" return self._command_state is None def update(self): """Update device state.""" response = self._telnet_command(self._command_state) if response: rendered = self._value_template.render_with_possible_json_value(response) self._state = rendered == "True" else: 
_LOGGER.warning("Empty response for command: %s", self._command_state) def turn_on(self, **kwargs): """Turn the device on.""" self._telnet_command(self._command_on) if self.assumed_state: self._state = True def turn_off(self, **kwargs): """Turn the device off.""" self._telnet_command(self._command_off) if self.assumed_state: self._state = False
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
repo_name: turbokongen/home-assistant
test_path: tests/components/kodi/test_config_flow.py
code_path: homeassistant/components/telnet/switch.py
"""Support for Google Nest SDM Cameras.""" import datetime import logging from typing import Optional from google_nest_sdm.camera_traits import ( CameraEventImageTrait, CameraImageTrait, CameraLiveStreamTrait, ) from google_nest_sdm.device import Device from google_nest_sdm.exceptions import GoogleNestException from haffmpeg.tools import IMAGE_JPEG from homeassistant.components.camera import SUPPORT_STREAM, Camera from homeassistant.components.ffmpeg import async_get_image from homeassistant.config_entries import ConfigEntry from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util.dt import utcnow from .const import DATA_SUBSCRIBER, DOMAIN from .device_info import DeviceInfo _LOGGER = logging.getLogger(__name__) # Used to schedule an alarm to refresh the stream before expiration STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30) async def async_setup_sdm_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the cameras.""" subscriber = hass.data[DOMAIN][DATA_SUBSCRIBER] try: device_manager = await subscriber.async_get_device_manager() except GoogleNestException as err: raise PlatformNotReady from err # Fetch initial data so we have data when entities subscribe. entities = [] for device in device_manager.devices.values(): if ( CameraImageTrait.NAME in device.traits or CameraLiveStreamTrait.NAME in device.traits ): entities.append(NestCamera(device)) async_add_entities(entities) class NestCamera(Camera): """Devices that support cameras.""" def __init__(self, device: Device): """Initialize the camera.""" super().__init__() self._device = device self._device_info = DeviceInfo(device) self._stream = None self._stream_refresh_unsub = None # Cache of most recent event image self._event_id = None self._event_image_bytes = None self._event_image_cleanup_unsub = None @property def should_poll(self) -> bool: """Disable polling since entities have state pushed via pubsub.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" # The API "name" field is a unique device identifier. 
return f"{self._device.name}-camera" @property def name(self): """Return the name of the camera.""" return self._device_info.device_name @property def device_info(self): """Return device specific attributes.""" return self._device_info.device_info @property def brand(self): """Return the camera brand.""" return self._device_info.device_brand @property def model(self): """Return the camera model.""" return self._device_info.device_model @property def supported_features(self): """Flag supported features.""" supported_features = 0 if CameraLiveStreamTrait.NAME in self._device.traits: supported_features |= SUPPORT_STREAM return supported_features async def stream_source(self): """Return the source of the stream.""" if CameraLiveStreamTrait.NAME not in self._device.traits: return None trait = self._device.traits[CameraLiveStreamTrait.NAME] if not self._stream: _LOGGER.debug("Fetching stream url") self._stream = await trait.generate_rtsp_stream() self._schedule_stream_refresh() if self._stream.expires_at < utcnow(): _LOGGER.warning("Stream already expired") return self._stream.rtsp_stream_url def _schedule_stream_refresh(self): """Schedules an alarm to refresh the stream url before expiration.""" _LOGGER.debug("New stream url expires at %s", self._stream.expires_at) refresh_time = self._stream.expires_at - STREAM_EXPIRATION_BUFFER # Schedule an alarm to extend the stream if self._stream_refresh_unsub is not None: self._stream_refresh_unsub() self._stream_refresh_unsub = async_track_point_in_utc_time( self.hass, self._handle_stream_refresh, refresh_time, ) async def _handle_stream_refresh(self, now): """Alarm that fires to check if the stream should be refreshed.""" if not self._stream: return _LOGGER.debug("Extending stream url") try: self._stream = await self._stream.extend_rtsp_stream() except GoogleNestException as err: _LOGGER.debug("Failed to extend stream: %s", err) # Next attempt to catch a url will get a new one self._stream = None return # Update the stream worker with the latest valid url if self.stream: self.stream.update_source(self._stream.rtsp_stream_url) self._schedule_stream_refresh() async def async_will_remove_from_hass(self): """Invalidates the RTSP token when unloaded.""" if self._stream: _LOGGER.debug("Invalidating stream") await self._stream.stop_rtsp_stream() if self._stream_refresh_unsub: self._stream_refresh_unsub() self._event_id = None self._event_image_bytes = None if self._event_image_cleanup_unsub is not None: self._event_image_cleanup_unsub() async def async_added_to_hass(self): """Run when entity is added to register update signal handler.""" self.async_on_remove( self._device.add_update_listener(self.async_write_ha_state) ) async def async_camera_image(self): """Return bytes of camera image.""" # Returns the snapshot of the last event for ~30 seconds after the event active_event_image = await self._async_active_event_image() if active_event_image: return active_event_image # Fetch still image from the live stream stream_url = await self.stream_source() if not stream_url: return None return await async_get_image(self.hass, stream_url, output_format=IMAGE_JPEG) async def _async_active_event_image(self): """Return image from any active events happening.""" if CameraEventImageTrait.NAME not in self._device.traits: return None trait = self._device.active_event_trait if not trait: return None # Reuse image bytes if they have already been fetched event = trait.last_event if self._event_id is not None and self._event_id == event.event_id: return self._event_image_bytes 
_LOGGER.debug("Generating event image URL for event_id %s", event.event_id) image_bytes = await self._async_fetch_active_event_image(trait) if image_bytes is None: return None self._event_id = event.event_id self._event_image_bytes = image_bytes self._schedule_event_image_cleanup(event.expires_at) return image_bytes async def _async_fetch_active_event_image(self, trait): """Return image bytes for an active event.""" try: event_image = await trait.generate_active_event_image() except GoogleNestException as err: _LOGGER.debug("Unable to generate event image URL: %s", err) return None if not event_image: return None try: return await event_image.contents() except GoogleNestException as err: _LOGGER.debug("Unable to fetch event image: %s", err) return None def _schedule_event_image_cleanup(self, point_in_time): """Schedules an alarm to remove the image bytes from memory, honoring expiration.""" if self._event_image_cleanup_unsub is not None: self._event_image_cleanup_unsub() self._event_image_cleanup_unsub = async_track_point_in_utc_time( self.hass, self._handle_event_image_cleanup, point_in_time, ) def _handle_event_image_cleanup(self, now): """Clear images cached from events and scheduled callback.""" self._event_id = None self._event_image_bytes = None self._event_image_cleanup_unsub = None
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
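The MockConnection, MockWSConnection and get_kodi_connection helpers these tests import from .util are not included in this dump. Below is a minimal sketch of the interface the tests actually exercise; it is not the real tests/components/kodi/util.py, whose details (signatures, extra attributes) may differ:

class MockConnection:
    """Stand-in for an HTTP-only Kodi connection."""

    def __init__(self, connected=True):
        self._connected = connected

    async def connect(self):
        """The HTTP path has nothing to open."""

    @property
    def connected(self):
        """Report the canned connection state (tests patch this on the WS variant)."""
        return self._connected

    async def close(self):
        """Nothing to tear down."""


class MockWSConnection(MockConnection):
    """WebSocket variant; the tests patch connect/connected on this class."""


def get_kodi_connection(host, port, ws_port, username, password, ssl=False, timeout=5, session=None):
    """Return the WS mock when a ws_port is given, else the HTTP mock (signature assumed)."""
    return MockWSConnection() if ws_port else MockConnection()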
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/nest/camera_sdm.py
"""Support for testing internet speed via Speedtest.net.""" from datetime import timedelta import logging import speedtest import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_STARTED, ) from homeassistant.core import CoreState, callback from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_MANUAL, CONF_SERVER_ID, DEFAULT_SCAN_INTERVAL, DEFAULT_SERVER, DOMAIN, SENSOR_TYPES, SPEED_TEST_SERVICE, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_SERVER_ID): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=timedelta(minutes=DEFAULT_SCAN_INTERVAL) ): cv.positive_time_period, vol.Optional(CONF_MANUAL, default=False): cv.boolean, vol.Optional( CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES) ): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]), } ) }, extra=vol.ALLOW_EXTRA, ) def server_id_valid(server_id): """Check if server_id is valid.""" try: api = speedtest.Speedtest() api.get_servers([int(server_id)]) except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers): return False return True async def async_setup(hass, config): """Import integration from config.""" if DOMAIN in config: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] ) ) return True async def async_setup_entry(hass, config_entry): """Set up the Speedtest.net component.""" coordinator = SpeedTestDataCoordinator(hass, config_entry) await coordinator.async_setup() async def _enable_scheduled_speedtests(*_): """Activate the data update coordinator.""" coordinator.update_interval = timedelta( minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) ) await coordinator.async_refresh() if not config_entry.options[CONF_MANUAL]: if hass.state == CoreState.running: await _enable_scheduled_speedtests() if not coordinator.last_update_success: raise ConfigEntryNotReady else: # Running a speed test during startup can prevent # integrations from being able to setup because it # can saturate the network interface. 
hass.bus.async_listen_once( EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests ) hass.data[DOMAIN] = coordinator hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, "sensor") ) return True async def async_unload_entry(hass, config_entry): """Unload SpeedTest Entry from config_entry.""" hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE) hass.data[DOMAIN].async_unload() await hass.config_entries.async_forward_entry_unload(config_entry, "sensor") hass.data.pop(DOMAIN) return True class SpeedTestDataCoordinator(DataUpdateCoordinator): """Get the latest data from speedtest.net.""" def __init__(self, hass, config_entry): """Initialize the data object.""" self.hass = hass self.config_entry = config_entry self.api = None self.servers = {} self._unsub_update_listener = None super().__init__( self.hass, _LOGGER, name=DOMAIN, update_method=self.async_update, ) def update_servers(self): """Update list of test servers.""" try: server_list = self.api.get_servers() except speedtest.ConfigRetrievalError: _LOGGER.debug("Error retrieving server list") return self.servers[DEFAULT_SERVER] = {} for server in sorted( server_list.values(), key=lambda server: server[0]["country"] + server[0]["sponsor"], ): self.servers[ f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}" ] = server[0] def update_data(self): """Get the latest data from speedtest.net.""" self.update_servers() self.api.closest.clear() if self.config_entry.options.get(CONF_SERVER_ID): server_id = self.config_entry.options.get(CONF_SERVER_ID) self.api.get_servers(servers=[server_id]) self.api.get_best_server() _LOGGER.debug( "Executing speedtest.net speed test with server_id: %s", self.api.best["id"] ) self.api.download() self.api.upload() return self.api.results.dict() async def async_update(self, *_): """Update Speedtest data.""" try: return await self.hass.async_add_executor_job(self.update_data) except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers) as err: raise UpdateFailed from err async def async_set_options(self): """Set options for entry.""" if not self.config_entry.options: data = {**self.config_entry.data} options = { CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL), CONF_MANUAL: data.pop(CONF_MANUAL, False), CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")), } self.hass.config_entries.async_update_entry( self.config_entry, data=data, options=options ) async def async_setup(self): """Set up SpeedTest.""" try: self.api = await self.hass.async_add_executor_job(speedtest.Speedtest) except speedtest.ConfigRetrievalError as err: raise ConfigEntryNotReady from err async def request_update(call): """Request update.""" await self.async_request_refresh() await self.async_set_options() await self.hass.async_add_executor_job(self.update_servers) self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update) self._unsub_update_listener = self.config_entry.add_update_listener( options_updated_listener ) @callback def async_unload(self): """Unload the coordinator.""" if not self._unsub_update_listener: return self._unsub_update_listener() self._unsub_update_listener = None async def options_updated_listener(hass, entry): """Handle options update.""" if entry.options[CONF_MANUAL]: hass.data[DOMAIN].update_interval = None return hass.data[DOMAIN].update_interval = timedelta( minutes=entry.options[CONF_SCAN_INTERVAL] ) await hass.data[DOMAIN].async_request_refresh()
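For reference, the update_data() method above boils down to a handful of speedtest-cli calls. This standalone sketch mirrors those same calls outside Home Assistant; the server id 1234 is a placeholder:

import speedtest

api = speedtest.Speedtest()
api.get_servers(servers=[1234])  # omit the argument to fetch the full server list
api.get_best_server()            # picks the lowest-latency candidate
api.download()
api.upload()
results = api.results.dict()     # the same dict the coordinator returns
print(results["download"], results["upload"], results["ping"])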
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/components/speedtestdotnet/__init__.py
""" Module with location helpers. detect_location_info and elevation are mocked by default during tests. """ import asyncio import collections import math from typing import Any, Dict, Optional, Tuple import aiohttp ELEVATION_URL = "https://api.open-elevation.com/api/v1/lookup" IP_API = "http://ip-api.com/json" IPAPI = "https://ipapi.co/json/" # Constants from https://github.com/maurycyp/vincenty # Earth ellipsoid according to WGS 84 # Axis a of the ellipsoid (Radius of the earth in meters) AXIS_A = 6378137 # Flattening f = (a-b) / a FLATTENING = 1 / 298.257223563 # Axis b of the ellipsoid in meters. AXIS_B = 6356752.314245 MILES_PER_KILOMETER = 0.621371 MAX_ITERATIONS = 200 CONVERGENCE_THRESHOLD = 1e-12 LocationInfo = collections.namedtuple( "LocationInfo", [ "ip", "country_code", "country_name", "region_code", "region_name", "city", "zip_code", "time_zone", "latitude", "longitude", "use_metric", ], ) async def async_detect_location_info( session: aiohttp.ClientSession, ) -> Optional[LocationInfo]: """Detect location information.""" data = await _get_ipapi(session) if data is None: data = await _get_ip_api(session) if data is None: return None data["use_metric"] = data["country_code"] not in ("US", "MM", "LR") return LocationInfo(**data) def distance( lat1: Optional[float], lon1: Optional[float], lat2: float, lon2: float ) -> Optional[float]: """Calculate the distance in meters between two points. Async friendly. """ if lat1 is None or lon1 is None: return None result = vincenty((lat1, lon1), (lat2, lon2)) if result is None: return None return result * 1000 # Author: https://github.com/maurycyp # Source: https://github.com/maurycyp/vincenty # License: https://github.com/maurycyp/vincenty/blob/master/LICENSE def vincenty( point1: Tuple[float, float], point2: Tuple[float, float], miles: bool = False ) -> Optional[float]: """ Vincenty formula (inverse method) to calculate the distance. Result in kilometers or miles between two points on the surface of a spheroid. Async friendly. 
""" # short-circuit coincident points if point1[0] == point2[0] and point1[1] == point2[1]: return 0.0 # pylint: disable=invalid-name U1 = math.atan((1 - FLATTENING) * math.tan(math.radians(point1[0]))) U2 = math.atan((1 - FLATTENING) * math.tan(math.radians(point2[0]))) L = math.radians(point2[1] - point1[1]) Lambda = L sinU1 = math.sin(U1) cosU1 = math.cos(U1) sinU2 = math.sin(U2) cosU2 = math.cos(U2) for _ in range(MAX_ITERATIONS): sinLambda = math.sin(Lambda) cosLambda = math.cos(Lambda) sinSigma = math.sqrt( (cosU2 * sinLambda) ** 2 + (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda) ** 2 ) if sinSigma == 0.0: return 0.0 # coincident points cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda sigma = math.atan2(sinSigma, cosSigma) sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma cosSqAlpha = 1 - sinAlpha ** 2 try: cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha except ZeroDivisionError: cos2SigmaM = 0 C = FLATTENING / 16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha)) LambdaPrev = Lambda Lambda = L + (1 - C) * FLATTENING * sinAlpha * ( sigma + C * sinSigma * (cos2SigmaM + C * cosSigma * (-1 + 2 * cos2SigmaM ** 2)) ) if abs(Lambda - LambdaPrev) < CONVERGENCE_THRESHOLD: break # successful convergence else: return None # failure to converge uSq = cosSqAlpha * (AXIS_A ** 2 - AXIS_B ** 2) / (AXIS_B ** 2) A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq))) B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq))) deltaSigma = ( B * sinSigma * ( cos2SigmaM + B / 4 * ( cosSigma * (-1 + 2 * cos2SigmaM ** 2) - B / 6 * cos2SigmaM * (-3 + 4 * sinSigma ** 2) * (-3 + 4 * cos2SigmaM ** 2) ) ) ) s = AXIS_B * A * (sigma - deltaSigma) s /= 1000 # Conversion of meters to kilometers if miles: s *= MILES_PER_KILOMETER # kilometers to miles return round(s, 6) async def _get_ipapi(session: aiohttp.ClientSession) -> Optional[Dict[str, Any]]: """Query ipapi.co for location data.""" try: resp = await session.get(IPAPI, timeout=5) except (aiohttp.ClientError, asyncio.TimeoutError): return None try: raw_info = await resp.json() except (aiohttp.ClientError, ValueError): return None # ipapi allows 30k free requests/month. Some users exhaust those. if raw_info.get("latitude") == "Sign up to access": return None return { "ip": raw_info.get("ip"), "country_code": raw_info.get("country"), "country_name": raw_info.get("country_name"), "region_code": raw_info.get("region_code"), "region_name": raw_info.get("region"), "city": raw_info.get("city"), "zip_code": raw_info.get("postal"), "time_zone": raw_info.get("timezone"), "latitude": raw_info.get("latitude"), "longitude": raw_info.get("longitude"), } async def _get_ip_api(session: aiohttp.ClientSession) -> Optional[Dict[str, Any]]: """Query ip-api.com for location data.""" try: resp = await session.get(IP_API, timeout=5) except (aiohttp.ClientError, asyncio.TimeoutError): return None try: raw_info = await resp.json() except (aiohttp.ClientError, ValueError): return None return { "ip": raw_info.get("query"), "country_code": raw_info.get("countryCode"), "country_name": raw_info.get("country"), "region_code": raw_info.get("region"), "region_name": raw_info.get("regionName"), "city": raw_info.get("city"), "zip_code": raw_info.get("zip"), "time_zone": raw_info.get("timezone"), "latitude": raw_info.get("lat"), "longitude": raw_info.get("lon"), }
"""Test the Kodi config flow.""" from unittest.mock import AsyncMock, PropertyMock, patch import pytest from homeassistant import config_entries from homeassistant.components.kodi.config_flow import ( CannotConnectError, InvalidAuthError, ) from homeassistant.components.kodi.const import DEFAULT_TIMEOUT, DOMAIN from .util import ( TEST_CREDENTIALS, TEST_DISCOVERY, TEST_DISCOVERY_WO_UUID, TEST_HOST, TEST_IMPORT, TEST_WS_PORT, UUID, MockConnection, MockWSConnection, get_kodi_connection, ) from tests.common import MockConfigEntry @pytest.fixture async def user_flow(hass): """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} return result["flow_id"] async def test_user_flow(hass, user_flow): """Test a successful user initiated flow.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_auth(hass, user_flow): """Test we handle valid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, **TEST_CREDENTIALS, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_valid_ws_port(hass, user_flow): """Test we handle valid websocket port.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( 
"homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_empty_ws_port(hass, user_flow): """Test we handle an empty websocket port input.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {"ws_port": 0} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_HOST["host"] assert result["data"] == { **TEST_HOST, "ws_port": None, "password": None, "username": None, "name": None, "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_invalid_auth(hass, user_flow): """Test we handle invalid auth.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "invalid_auth"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {"base": "unknown"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_CREDENTIALS ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_form_cannot_connect_http(hass, user_flow): """Test we handle cannot connect over HTTP error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_http(hass, user_flow): """Test we handle generic exception over HTTP.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "user" assert result["errors"] == {"base": "unknown"} async def test_form_cannot_connect_ws(hass, user_flow): """Test we handle cannot connect over WebSocket error.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connected", new_callable=PropertyMock(return_value=False) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "cannot_connect"} async def test_form_exception_ws(hass, user_flow): """Test we handle generic exception over WebSocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): 
result = await hass.config_entries.flow.async_configure(user_flow, TEST_HOST) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=Exception) ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_WS_PORT ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {"base": "unknown"} async def test_discovery(hass): """Test discovery flow works.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" with patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_configure( flow_id=result["flow_id"], user_input={} ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == "hostname" assert result["data"] == { **TEST_HOST, **TEST_WS_PORT, "password": None, "username": None, "name": "hostname", "timeout": DEFAULT_TIMEOUT, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_discovery_cannot_connect_http(hass): """Test discovery aborts if cannot connect.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_discovery_cannot_connect_ws(hass): """Test discovery aborts if cannot connect to websocket.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch.object( MockWSConnection, "connect", AsyncMock(side_effect=CannotConnectError), ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", new=get_kodi_connection, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "ws_port" assert result["errors"] == {} async def test_discovery_exception_http(hass, user_flow): """Test we handle generic exception during discovery validation.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "unknown" async def test_discovery_invalid_auth(hass): """Test we handle invalid auth during discovery.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( 
"homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "credentials" assert result["errors"] == {} async def test_discovery_duplicate_data(hass): """Test discovery aborts if same mDNS packet arrives.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "form" assert result["step_id"] == "discovery_confirm" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_in_progress" async def test_discovery_updates_unique_id(hass): """Test a duplicate discovery id aborts and updates existing entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=UUID, data={"host": "dummy", "port": 11, "namename": "dummy.local."}, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY ) assert result["type"] == "abort" assert result["reason"] == "already_configured" assert entry.data["host"] == "1.1.1.1" assert entry.data["port"] == 8080 assert entry.data["name"] == "hostname" async def test_discovery_without_unique_id(hass): """Test a discovery flow with no unique id aborts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY_WO_UUID ) assert result["type"] == "abort" assert result["reason"] == "no_uuid" async def test_form_import(hass): """Test we get the form with import source.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", return_value=True, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ), patch( "homeassistant.components.kodi.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.kodi.async_setup_entry", return_value=True, ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) await hass.async_block_till_done() assert result["type"] == "create_entry" assert result["title"] == TEST_IMPORT["name"] assert result["data"] == TEST_IMPORT assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_form_import_invalid_auth(hass): """Test we handle invalid auth on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=InvalidAuthError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "invalid_auth" async def test_form_import_cannot_connect(hass): """Test we handle cannot connect on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=CannotConnectError, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect" async def test_form_import_exception(hass): """Test we handle unknown exception on import.""" with patch( "homeassistant.components.kodi.config_flow.Kodi.ping", side_effect=Exception, ), patch( "homeassistant.components.kodi.config_flow.get_kodi_connection", return_value=MockConnection(), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TEST_IMPORT, ) assert result["type"] == "abort" assert result["reason"] == "unknown"
turbokongen/home-assistant
tests/components/kodi/test_config_flow.py
homeassistant/util/location.py
# -*- coding: utf-8 -*- # Copyright 2007-2020 The HyperSpy developers # # This file is part of HyperSpy. # # HyperSpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # HyperSpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with HyperSpy. If not, see <http://www.gnu.org/licenses/>. # Set the PyQt API to 2 to avoid incompatibilities between matplotlib # and traitsui import logging _logger = logging.getLogger(__name__) from hyperspy.logger import set_log_level from hyperspy.defaults_parser import preferences set_log_level(preferences.General.logging_level) from hyperspy import datasets from hyperspy.utils import * from hyperspy.io import load from hyperspy import signals from hyperspy.Release import version as __version__ from hyperspy import docstrings __doc__ = """ All public packages, functions and classes are available in this module. %s Functions: create_model Create a model for curve fitting. get_configuration_directory_path Return the configuration directory path. load Load data into BaseSignal instances from supported files. preferences Preferences class instance to configure the default value of different parameters. It has a CLI and a GUI that can be started by executing its `gui` method i.e. `preferences.gui()`. stack Stack several signals. interactive Define operations that are automatically recomputed on event changes. set_log_level Convenience function to set HyperSpy's log level. The :mod:`~hyperspy.api` package contains the following submodules/packages: :mod:`~hyperspy.api.signals` `Signal` classes which are the core of HyperSpy. Use this module to create `Signal` instances manually from numpy arrays. Note that it is more convenient to use the `load` function to load data from supported file formats. :mod:`~hyperspy.api.model` Contains the :mod:`~hyperspy.api.model.components` module with components that can be used to create a model for curve fitting. :mod:`~hyperspy.api.eds` Functions for energy dispersive X-ray data analysis. :mod:`~hyperspy.api.material` Useful functions for materials properties and elements database that includes physical properties and X-ray and EELS energies. :mod:`~hyperspy.api.plot` Plotting functions that operate on multiple signals. :mod:`~hyperspy.api.datasets` Example datasets. :mod:`~hyperspy.api.roi` Regions of interest (ROIs) that operate on `BaseSignal` instances and include widgets for interactive operation. :mod:`~hyperspy.api.samfire` SAMFire utilities (strategies, Pool, fit convergence tests) For more details see their docstrings. """ % docstrings.START_HSPY # Remove the module to avoid polluting the namespace del docstrings def get_configuration_directory_path(): import hyperspy.misc.config_dir return hyperspy.misc.config_dir.config_path
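The module above appears to be HyperSpy's api module, conventionally used via `import hyperspy.api as hs`; a short, illustrative sketch of the names it re-exports (shapes and title are placeholders):

import numpy as np
import hyperspy.api as hs

s = hs.signals.Signal1D(np.random.random((10, 100)))  # 10 spectra, 100 channels each
s.metadata.General.title = "demo"
m = s.create_model()        # empty model, ready for curve-fitting components
stacked = hs.stack([s, s])  # stack two signals along a new navigation axis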
# -*- coding: utf-8 -*- # Copyright 2007-2020 The HyperSpy developers # # This file is part of HyperSpy. # # HyperSpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # HyperSpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with HyperSpy. If not, see <http://www.gnu.org/licenses/>. import sys from unittest import mock import numpy as np import pytest from hyperspy import signals from hyperspy.decorators import lazifyTestClass from hyperspy.signal_tools import SpikesRemoval, SpikesRemovalInteractive def _verify_test_sum_x_E(self, s): np.testing.assert_array_equal(self.signal.data.sum(), s.data) assert s.data.ndim == 1 # Check that there is still one signal axis. assert s.axes_manager.signal_dimension == 1 def _test_default_navigation_signal_operations_over_many_axes(self, op): s = getattr(self.signal, op)() ar = getattr(self.data, op)(axis=(0, 1)) np.testing.assert_array_equal(ar, s.data) assert s.data.ndim == 1 assert s.axes_manager.signal_dimension == 1 assert s.axes_manager.navigation_dimension == 0 def test_signal_iterator(): sig = signals.Signal1D(np.arange(3).reshape((3, 1))) for s in (sig, sig.as_lazy()): assert next(s).data[0] == 0 # If the following fails it can be because the iteration index was not # restarted for i, signal in enumerate(s): assert i == signal.data[0] @lazifyTestClass class TestDerivative: def setup_method(self, method): offset = 3 scale = 0.1 x = np.arange(-offset, offset, scale) s = signals.Signal1D(np.sin(x)) s.axes_manager[0].offset = x[0] s.axes_manager[0].scale = scale self.s = s def test_derivative_data(self): der = self.s.derivative(axis=0, order=4) np.testing.assert_allclose( der.data, np.sin(der.axes_manager[0].axis), atol=1e-2 ) @lazifyTestClass class TestOutArg: def setup_method(self, method): # Some test require consistent random data for reference to be correct np.random.seed(0) s = signals.Signal1D(np.random.rand(5, 4, 3, 6)) for axis, name in zip( s.axes_manager._get_axes_in_natural_order(), ["x", "y", "z", "E"] ): axis.name = name self.s = s def _run_single(self, f, s, kwargs): m = mock.Mock() s1 = f(**kwargs) s1.events.data_changed.connect(m.data_changed) s.data = s.data + 2 s2 = f(**kwargs) r = f(out=s1, **kwargs) m.data_changed.assert_called_with(obj=s1) assert r is None np.testing.assert_array_equal(s1.data, s2.data) def test_get_histogram(self): self._run_single(self.s.get_histogram, self.s, {}) if self.s._lazy: self._run_single(self.s.get_histogram, self.s, {"rechunk": False}) def test_sum(self): self._run_single(self.s.sum, self.s, dict(axis=("x", "z"))) self._run_single(self.s.sum, self.s.get_current_signal(), dict(axis=0)) def test_sum_return_1d_signal(self): self._run_single(self.s.sum, self.s, dict(axis=self.s.axes_manager._axes)) self._run_single(self.s.sum, self.s.get_current_signal(), dict(axis=0)) def test_mean(self): self._run_single(self.s.mean, self.s, dict(axis=("x", "z"))) def test_max(self): self._run_single(self.s.max, self.s, dict(axis=("x", "z"))) def test_min(self): self._run_single(self.s.min, self.s, dict(axis=("x", "z"))) def test_std(self): self._run_single(self.s.std, self.s, 
dict(axis=("x", "z"))) def test_var(self): self._run_single(self.s.var, self.s, dict(axis=("x", "z"))) def test_diff(self): self._run_single(self.s.diff, self.s, dict(axis=0)) def test_derivative(self): self._run_single(self.s.derivative, self.s, dict(axis=0)) def test_integrate_simpson(self): self._run_single(self.s.integrate_simpson, self.s, dict(axis=0)) def test_integrate1D(self): self._run_single(self.s.integrate1D, self.s, dict(axis=0)) def test_indexmax(self): self._run_single(self.s.indexmax, self.s, dict(axis=0)) def test_valuemax(self): self._run_single(self.s.valuemax, self.s, dict(axis=0)) @pytest.mark.xfail( sys.platform == "win32", reason="sometimes it does not run lazily on windows" ) def test_rebin(self): s = self.s scale = (1, 2, 1, 2) self._run_single(s.rebin, s, dict(scale=scale)) def test_as_spectrum(self): s = self.s self._run_single(s.as_signal1D, s, dict(spectral_axis=1)) def test_as_image(self): s = self.s self._run_single( s.as_signal2D, s, dict(image_axes=(s.axes_manager.navigation_axes[0:2])) ) def test_inav(self): s = self.s self._run_single( s.inav.__getitem__, s, {"slices": (slice(2, 4, None), slice(None), slice(0, 2, None))}, ) def test_isig(self): s = self.s self._run_single(s.isig.__getitem__, s, {"slices": (slice(2, 4, None),)}) def test_inav_variance(self): s = self.s s.metadata.set_item("Signal.Noise_properties.variance", s.deepcopy()) s1 = s.inav[2:4, 0:2] s2 = s.inav[2:4, 1:3] s.inav.__getitem__( slices=(slice(2, 4, None), slice(1, 3, None), slice(None)), out=s1 ) np.testing.assert_array_equal( s1.metadata.Signal.Noise_properties.variance.data, s2.metadata.Signal.Noise_properties.variance.data, ) def test_isig_variance(self): s = self.s s.metadata.set_item("Signal.Noise_properties.variance", s.deepcopy()) s1 = s.isig[2:4] s2 = s.isig[1:5] s.isig.__getitem__(slices=(slice(1, 5, None)), out=s1) np.testing.assert_array_equal( s1.metadata.Signal.Noise_properties.variance.data, s2.metadata.Signal.Noise_properties.variance.data, ) def test_histogram_axis_changes(self): s = self.s h1 = s.get_histogram(bins=4) h2 = s.get_histogram(bins=5) s.get_histogram(bins=5, out=h1) np.testing.assert_array_equal(h1.data, h2.data) assert h1.axes_manager[-1].size == h2.axes_manager[-1].size def test_masked_array_mean(self): s = self.s if s._lazy: pytest.skip("LazySignals do not support masked arrays") mask = s.data > 0.5 s.data = np.arange(s.data.size).reshape(s.data.shape) s.data = np.ma.masked_array(s.data, mask=mask) sr = s.mean(axis=("x", "z",)) np.testing.assert_array_equal( sr.data.shape, [ax.size for ax in s.axes_manager[("y", "E")]] ) print(sr.data.tolist()) ref = [ [ 202.28571428571428, 203.28571428571428, 182.0, 197.66666666666666, 187.0, 177.8, ], [ 134.0, 190.0, 191.27272727272728, 170.14285714285714, 172.0, 209.85714285714286, ], [168.0, 161.8, 162.8, 185.4, 197.71428571428572, 178.14285714285714], [240.0, 184.33333333333334, 260.0, 229.0, 173.2, 167.0], ] np.testing.assert_array_equal(sr.data, ref) def test_masked_array_sum(self): s = self.s if s._lazy: pytest.skip("LazySignals do not support masked arrays") mask = s.data > 0.5 s.data = np.ma.masked_array(np.ones_like(s.data), mask=mask) sr = s.sum(axis=("x", "z",)) np.testing.assert_array_equal(sr.data.sum(), (~mask).sum()) @pytest.mark.parametrize("mask", (True, False)) def test_sum_no_navigation_axis(self, mask): s = signals.Signal1D(np.arange(100)) if mask: s.data = np.ma.masked_array(s.data, mask=(s < 50)) # Since s haven't any navigation axis, it returns the same signal as # default 
np.testing.assert_array_equal(s, s.sum()) # When we specify an axis, it actually takes the sum. np.testing.assert_array_equal(s.data.sum(), s.sum(axis=0)) def test_masked_arrays_out(self): s = self.s if s._lazy: pytest.skip("LazySignals do not support masked arrays") mask = s.data > 0.5 s.data = np.ones_like(s.data) s.data = np.ma.masked_array(s.data, mask=mask) self._run_single(s.sum, s, dict(axis=("x", "z"))) def test_wrong_out_shape(self): s = self.s ss = s.sum() # Sum over navigation, data shape (6,) with pytest.raises(ValueError): s.sum(axis=s.axes_manager._axes, out=ss) def test_wrong_out_shape_masked(self): s = self.s s.data = np.ma.array(s.data) ss = s.sum() # Sum over navigation, data shape (6,) with pytest.raises(ValueError): s.sum(axis=s.axes_manager._axes, out=ss)
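The `_run_single` helper above encodes the contract of the `out=` argument: the call writes its result into a preallocated signal and returns `None`. A standalone sketch of that pattern (variable names invented):

import numpy as np
from hyperspy import signals

s = signals.Signal1D(np.random.rand(5, 4, 3, 6))
target = s.sum(axis=0)              # first call allocates the result signal
result = s.sum(axis=0, out=target)  # later calls reuse it in place
assert result is None               # out= calls return None, as asserted above
np.testing.assert_array_equal(target.data, s.sum(axis=0).data)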
erh3cq/hyperspy
hyperspy/tests/signal/test_tools.py
hyperspy/api_nogui.py
import numpy as np

from hyperspy.decorators import lazifyTestClass
from hyperspy.signal import BaseSignal


@lazifyTestClass
class TestSignalFolding:

    def setup_method(self, method):
        self.s = BaseSignal(np.zeros((2, 3, 4, 5)))
        self.s = self.s.transpose(signal_axes=2)

    def test_unfold_navigation(self):
        s = self.s.deepcopy()
        s.unfold_navigation_space()
        assert (s.axes_manager.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfold_signal(self):
        s = self.s.deepcopy()
        s.unfold_signal_space()
        assert (s.axes_manager.signal_shape ==
                (self.s.axes_manager.signal_size,))

    def test_unfolded_repr(self):
        self.s.unfold()
        assert "unfolded" in repr(self.s)

    def test_unfold_navigation_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=True, unfold_signal=False)
        assert (s.axes_manager.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfold_signal_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=False, unfold_signal=True)
        assert (s.axes_manager.signal_shape ==
                (self.s.axes_manager.signal_size,))

    def test_unfold_nothing_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=False, unfold_signal=False)
        assert s.data.shape == self.s.data.shape

    def test_unfold_full_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=True, unfold_signal=True)
        assert (s.axes_manager.signal_shape ==
                (self.s.axes_manager.signal_size,))
        assert (s.axes_manager.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfolded_context_manager(self):
        s = self.s.deepcopy()
        with s.unfolded():
            # Check that both spaces unfold as expected
            assert (s.axes_manager.navigation_shape ==
                    (self.s.axes_manager.navigation_size,))
            assert (s.axes_manager.signal_shape ==
                    (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_full_by_keywords(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=True, unfold_signal=True) as folded:
            assert folded
            # Check that both spaces unfold as expected
            assert (s.axes_manager.navigation_shape ==
                    (self.s.axes_manager.navigation_size,))
            assert (s.axes_manager.signal_shape ==
                    (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_navigation_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=True, unfold_signal=False) as folded:
            assert folded
            # Check that only navigation space unfolded
            assert (s.axes_manager.navigation_shape ==
                    (self.s.axes_manager.navigation_size,))
            assert (s.axes_manager.signal_shape ==
                    self.s.axes_manager.signal_shape)
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_signal_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=False, unfold_signal=True) as folded:
            assert folded
            # Check that only signal space unfolded
            assert (s.axes_manager.navigation_shape ==
                    self.s.axes_manager.navigation_shape)
            assert (s.axes_manager.signal_shape ==
                    (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_nothing_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(False, False) as folded:
            assert not folded
            # Check that nothing folded
            assert (s.axes_manager.navigation_shape ==
                    self.s.axes_manager.navigation_shape)
            assert (s.axes_manager.signal_shape ==
                    self.s.axes_manager.signal_shape)
        # Check that it "folds back" as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)


@lazifyTestClass
class TestSignalVarianceFolding:

    def setup_method(self, method):
        self.s = BaseSignal(np.zeros((2, 3, 4, 5)))
        self.s = self.s.transpose(signal_axes=2)
        self.s.estimate_poissonian_noise_variance()

    def test_unfold_navigation(self):
        s = self.s.deepcopy()
        s.unfold_navigation_space()
        meta_am = s.metadata.Signal.Noise_properties.variance.axes_manager
        assert (meta_am.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfold_signal(self):
        s = self.s.deepcopy()
        s.unfold_signal_space()
        meta_am = s.metadata.Signal.Noise_properties.variance.axes_manager
        assert (meta_am.signal_shape ==
                (self.s.axes_manager.signal_size,))
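A minimal sketch of the `unfolded` context manager the tests above exercise: inside the `with` block both spaces collapse to one axis each, and the original shape is restored on exit (the zero-filled data is illustrative):

import numpy as np
from hyperspy.signal import BaseSignal

s = BaseSignal(np.zeros((2, 3, 4, 5))).transpose(signal_axes=2)
with s.unfolded():
    assert s.axes_manager.navigation_shape == (6,)  # 2 * 3 navigation points
    assert s.axes_manager.signal_shape == (20,)     # 4 * 5 signal points
assert s.data.shape == (2, 3, 4, 5)                 # folded back on exit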
erh3cq/hyperspy
hyperspy/tests/signal/test_tools.py
hyperspy/tests/signal/test_folding.py
""" DataFrame --------- An efficient 2D container for potentially mixed-type time series or other labeled data series. Similar to its R counterpart, data.frame, except providing automatic data alignment and a host of useful data manipulation methods having to do with the labeling information """ import collections from collections import OrderedDict, abc import functools from io import StringIO import itertools import sys import warnings from textwrap import dedent from typing import FrozenSet, List, Optional, Set, Type, Union import numpy as np import numpy.ma as ma from pandas._config import get_option from pandas._libs import lib, algos as libalgos from pandas.util._decorators import (Appender, Substitution, rewrite_axis_style_signature, deprecate_kwarg) from pandas.util._validators import (validate_bool_kwarg, validate_axis_style_args) from pandas.compat import PY36, raise_with_traceback from pandas.compat.numpy import function as nv from pandas.core.arrays.sparse import SparseFrameAccessor from pandas.core.dtypes.cast import ( maybe_upcast, cast_scalar_to_array, infer_dtype_from_scalar, maybe_cast_to_datetime, maybe_infer_to_datetimelike, maybe_convert_platform, maybe_downcast_to_dtype, invalidate_string_dtypes, coerce_to_dtypes, maybe_upcast_putmask, find_common_type) from pandas.core.dtypes.common import ( is_dict_like, is_datetime64tz_dtype, is_object_dtype, is_extension_type, is_extension_array_dtype, is_datetime64_any_dtype, is_bool_dtype, is_integer_dtype, is_float_dtype, is_integer, is_scalar, is_dtype_equal, needs_i8_conversion, infer_dtype_from_object, ensure_float64, ensure_int64, ensure_platform_int, is_list_like, is_nested_list_like, is_iterator, is_sequence, is_named_tuple) from pandas.core.dtypes.generic import ( ABCSeries, ABCDataFrame, ABCIndexClass, ABCMultiIndex) from pandas.core.dtypes.missing import isna, notna from pandas.core import algorithms from pandas.core import common as com from pandas.core import nanops from pandas.core import ops from pandas.core.accessor import CachedAccessor from pandas.core.arrays import Categorical, ExtensionArray from pandas.core.arrays.datetimelike import ( DatetimeLikeArrayMixin as DatetimeLikeArray ) from pandas.core.generic import NDFrame, _shared_docs from pandas.core.index import (Index, MultiIndex, ensure_index, ensure_index_from_sequences) from pandas.core.indexes import base as ibase from pandas.core.indexes.datetimes import DatetimeIndex from pandas.core.indexes.period import PeriodIndex from pandas.core.indexing import (maybe_droplevels, convert_to_index_sliceable, check_bool_indexer) from pandas.core.internals import BlockManager from pandas.core.internals.construction import ( masked_rec_array_to_mgr, get_names_from_index, to_arrays, reorder_arrays, init_ndarray, init_dict, arrays_to_mgr, sanitize_index) from pandas.core.series import Series from pandas.io.formats import console from pandas.io.formats import format as fmt from pandas.io.formats.printing import pprint_thing import pandas.plotting # --------------------------------------------------------------------- # Docstring templates _shared_doc_kwargs = dict( axes='index, columns', klass='DataFrame', axes_single_arg="{0 or 'index', 1 or 'columns'}", axis="""axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index': apply function to each column. If 1 or 'columns': apply function to each row.""", optional_by=""" by : str or list of str Name or list of names to sort by. 
- if `axis` is 0 or `'index'` then `by` may contain index levels and/or column labels - if `axis` is 1 or `'columns'` then `by` may contain column levels and/or index labels .. versionchanged:: 0.23.0 Allow specifying index or column level names.""", versionadded_to_excel='', optional_labels="""labels : array-like, optional New labels / index to conform the axis specified by 'axis' to.""", optional_axis="""axis : int or str, optional Axis to target. Can be either the axis name ('index', 'columns') or number (0, 1).""", ) _numeric_only_doc = """numeric_only : boolean, default None Include only float, int, boolean data. If None, will attempt to use everything, then use only numeric data """ _merge_doc = """ Merge DataFrame or named Series objects with a database-style join. The join is done on columns or indexes. If joining columns on columns, the DataFrame indexes *will be ignored*. Otherwise if joining indexes on indexes or indexes on a column or columns, the index will be passed on. Parameters ----------%s right : DataFrame or named Series Object to merge with. how : {'left', 'right', 'outer', 'inner'}, default 'inner' Type of merge to be performed. * left: use only keys from left frame, similar to a SQL left outer join; preserve key order. * right: use only keys from right frame, similar to a SQL right outer join; preserve key order. * outer: use union of keys from both frames, similar to a SQL full outer join; sort keys lexicographically. * inner: use intersection of keys from both frames, similar to a SQL inner join; preserve the order of the left keys. on : label or list Column or index level names to join on. These must be found in both DataFrames. If `on` is None and not merging on indexes then this defaults to the intersection of the columns in both DataFrames. left_on : label or list, or array-like Column or index level names to join on in the left DataFrame. Can also be an array or list of arrays of the length of the left DataFrame. These arrays are treated as if they are columns. right_on : label or list, or array-like Column or index level names to join on in the right DataFrame. Can also be an array or list of arrays of the length of the right DataFrame. These arrays are treated as if they are columns. left_index : bool, default False Use the index from the left DataFrame as the join key(s). If it is a MultiIndex, the number of keys in the other DataFrame (either the index or a number of columns) must match the number of levels. right_index : bool, default False Use the index from the right DataFrame as the join key. Same caveats as left_index. sort : bool, default False Sort the join keys lexicographically in the result DataFrame. If False, the order of the join keys depends on the join type (how keyword). suffixes : tuple of (str, str), default ('_x', '_y') Suffix to apply to overlapping column names in the left and right side, respectively. To raise an exception on overlapping columns use (False, False). copy : bool, default True If False, avoid copy if possible. indicator : bool or str, default False If True, adds a column to output DataFrame called "_merge" with information on the source of each row. If string, column with information on source of each row will be added to output DataFrame, and column will be named value of string. 
Information column is Categorical-type and takes on a value of "left_only" for observations whose merge key only appears in 'left' DataFrame, "right_only" for observations whose merge key only appears in 'right' DataFrame, and "both" if the observation's merge key is found in both. validate : str, optional If specified, checks if merge is of specified type. * "one_to_one" or "1:1": check if merge keys are unique in both left and right datasets. * "one_to_many" or "1:m": check if merge keys are unique in left dataset. * "many_to_one" or "m:1": check if merge keys are unique in right dataset. * "many_to_many" or "m:m": allowed, but does not result in checks. .. versionadded:: 0.21.0 Returns ------- DataFrame A DataFrame of the two merged objects. See Also -------- merge_ordered : Merge with optional filling/interpolation. merge_asof : Merge on nearest keys. DataFrame.join : Similar method using indices. Notes ----- Support for specifying index levels as the `on`, `left_on`, and `right_on` parameters was added in version 0.23.0 Support for merging named Series objects was added in version 0.24.0 Examples -------- >>> df1 = pd.DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [1, 2, 3, 5]}) >>> df2 = pd.DataFrame({'rkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [5, 6, 7, 8]}) >>> df1 lkey value 0 foo 1 1 bar 2 2 baz 3 3 foo 5 >>> df2 rkey value 0 foo 5 1 bar 6 2 baz 7 3 foo 8 Merge df1 and df2 on the lkey and rkey columns. The value columns have the default suffixes, _x and _y, appended. >>> df1.merge(df2, left_on='lkey', right_on='rkey') lkey value_x rkey value_y 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2 with specified left and right suffixes appended to any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', ... suffixes=('_left', '_right')) lkey value_left rkey value_right 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2, but raise an exception if the DataFrames have any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', suffixes=(False, False)) Traceback (most recent call last): ... ValueError: columns overlap but no suffix specified: Index(['value'], dtype='object') """ # ----------------------------------------------------------------------- # DataFrame class class DataFrame(NDFrame): """ Two-dimensional size-mutable, potentially heterogeneous tabular data structure with labeled axes (rows and columns). Arithmetic operations align on both row and column labels. Can be thought of as a dict-like container for Series objects. The primary pandas data structure. Parameters ---------- data : ndarray (structured or homogeneous), Iterable, dict, or DataFrame Dict can contain Series, arrays, constants, or list-like objects .. versionchanged :: 0.23.0 If data is a dict, argument order is maintained for Python 3.6 and later. index : Index or array-like Index to use for resulting frame. Will default to RangeIndex if no indexing information part of input data and no index provided columns : Index or array-like Column labels to use for resulting frame. Will default to RangeIndex (0, 1, 2, ..., n) if no column labels are provided dtype : dtype, default None Data type to force. Only a single dtype is allowed. If None, infer copy : boolean, default False Copy data from inputs. Only affects DataFrame / 2d ndarray input See Also -------- DataFrame.from_records : Constructor from tuples, also record arrays. 
DataFrame.from_dict : From dicts of Series, arrays, or dicts. DataFrame.from_items : From sequence of (key, value) pairs read_csv, pandas.read_table, pandas.read_clipboard. Examples -------- Constructing DataFrame from a dictionary. >>> d = {'col1': [1, 2], 'col2': [3, 4]} >>> df = pd.DataFrame(data=d) >>> df col1 col2 0 1 3 1 2 4 Notice that the inferred dtype is int64. >>> df.dtypes col1 int64 col2 int64 dtype: object To enforce a single dtype: >>> df = pd.DataFrame(data=d, dtype=np.int8) >>> df.dtypes col1 int8 col2 int8 dtype: object Constructing DataFrame from numpy ndarray: >>> df2 = pd.DataFrame(np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]), ... columns=['a', 'b', 'c']) >>> df2 a b c 0 1 2 3 1 4 5 6 2 7 8 9 """ @property def _constructor(self): return DataFrame _constructor_sliced = Series # type: Type[Series] _deprecations = NDFrame._deprecations | frozenset([ 'get_value', 'set_value', 'from_csv', 'from_items' ]) # type: FrozenSet[str] _accessors = set() # type: Set[str] @property def _constructor_expanddim(self): raise NotImplementedError("Not supported for DataFrames!") # ---------------------------------------------------------------------- # Constructors def __init__(self, data=None, index=None, columns=None, dtype=None, copy=False): if data is None: data = {} if dtype is not None: dtype = self._validate_dtype(dtype) if isinstance(data, DataFrame): data = data._data if isinstance(data, BlockManager): mgr = self._init_mgr(data, axes=dict(index=index, columns=columns), dtype=dtype, copy=copy) elif isinstance(data, dict): mgr = init_dict(data, index, columns, dtype=dtype) elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords # masked recarray if isinstance(data, mrecords.MaskedRecords): mgr = masked_rec_array_to_mgr(data, index, columns, dtype, copy) # a masked array else: mask = ma.getmaskarray(data) if mask.any(): data, fill_value = maybe_upcast(data, copy=True) data.soften_mask() # set hardmask False if it was True data[mask] = fill_value else: data = data.copy() mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: data_columns = list(data.dtype.names) data = {k: data[k] for k in data_columns} if columns is None: columns = data_columns mgr = init_dict(data, index, columns, dtype=dtype) elif getattr(data, 'name', None) is not None: mgr = init_dict({data.name: data}, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) # For data is list-like, or Iterable (will consume into list) elif (isinstance(data, abc.Iterable) and not isinstance(data, (str, bytes))): if not isinstance(data, abc.Sequence): data = list(data) if len(data) > 0: if is_list_like(data[0]) and getattr(data[0], 'ndim', 1) == 1: if is_named_tuple(data[0]) and columns is None: columns = data[0]._fields arrays, columns = to_arrays(data, columns, dtype=dtype) columns = ensure_index(columns) # set the index if index is None: if isinstance(data[0], Series): index = get_names_from_index(data) elif isinstance(data[0], Categorical): index = ibase.default_index(len(data[0])) else: index = ibase.default_index(len(data)) mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) else: mgr = init_dict({}, index, columns, dtype=dtype) else: try: arr = np.array(data, dtype=dtype, copy=copy) except (ValueError, TypeError) as e: exc = TypeError('DataFrame constructor called with ' 'incompatible data and 
dtype: {e}'.format(e=e)) raise_with_traceback(exc) if arr.ndim == 0 and index is not None and columns is not None: values = cast_scalar_to_array((len(index), len(columns)), data, dtype=dtype) mgr = init_ndarray(values, index, columns, dtype=values.dtype, copy=False) else: raise ValueError('DataFrame constructor not properly called!') NDFrame.__init__(self, mgr, fastpath=True) # ---------------------------------------------------------------------- @property def axes(self): """ Return a list representing the axes of the DataFrame. It has the row axis labels and column axis labels as the only members. They are returned in that order. Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]}) >>> df.axes [RangeIndex(start=0, stop=2, step=1), Index(['col1', 'col2'], dtype='object')] """ return [self.index, self.columns] @property def shape(self): """ Return a tuple representing the dimensionality of the DataFrame. See Also -------- ndarray.shape Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]}) >>> df.shape (2, 2) >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4], ... 'col3': [5, 6]}) >>> df.shape (2, 3) """ return len(self.index), len(self.columns) @property def _is_homogeneous_type(self): """ Whether all the columns in a DataFrame have the same type. Returns ------- bool Examples -------- >>> DataFrame({"A": [1, 2], "B": [3, 4]})._is_homogeneous_type True >>> DataFrame({"A": [1, 2], "B": [3.0, 4.0]})._is_homogeneous_type False Items with the same type but different sizes are considered different types. >>> DataFrame({ ... "A": np.array([1, 2], dtype=np.int32), ... "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type False """ if self._data.any_extension_types: return len({block.dtype for block in self._data.blocks}) == 1 else: return not self._data.is_mixed_type # ---------------------------------------------------------------------- # Rendering Methods def _repr_fits_vertical_(self): """ Check length against max_rows. """ max_rows = get_option("display.max_rows") return len(self) <= max_rows def _repr_fits_horizontal_(self, ignore_width=False): """ Check if full repr fits in horizontal boundaries imposed by the display options width and max_columns. In case off non-interactive session, no boundaries apply. `ignore_width` is here so ipnb+HTML output can behave the way users expect. display.max_columns remains in effect. 
GH3541, GH3573 """ width, height = console.get_console_size() max_columns = get_option("display.max_columns") nb_columns = len(self.columns) # exceed max columns if ((max_columns and nb_columns > max_columns) or ((not ignore_width) and width and nb_columns > (width // 2))): return False # used by repr_html under IPython notebook or scripts ignore terminal # dims if ignore_width or not console.in_interactive_session(): return True if (get_option('display.width') is not None or console.in_ipython_frontend()): # check at least the column row for excessive width max_rows = 1 else: max_rows = get_option("display.max_rows") # when auto-detecting, so width=None and not in ipython front end # check whether repr fits horizontal by actually checking # the width of the rendered repr buf = StringIO() # only care about the stuff we'll actually print out # and to_string on entire frame may be expensive d = self if not (max_rows is None): # unlimited rows # min of two, where one may be None d = d.iloc[:min(max_rows, len(d))] else: return True d.to_string(buf=buf) value = buf.getvalue() repr_width = max(len(l) for l in value.split('\n')) return repr_width < width def _info_repr(self): """ True if the repr should show the info view. """ info_repr_option = (get_option("display.large_repr") == "info") return info_repr_option and not (self._repr_fits_horizontal_() and self._repr_fits_vertical_()) def __repr__(self): """ Return a string representation for a particular DataFrame. """ buf = StringIO("") if self._info_repr(): self.info(buf=buf) return buf.getvalue() max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") if get_option("display.expand_frame_repr"): width, _ = console.get_console_size() else: width = None self.to_string(buf=buf, max_rows=max_rows, max_cols=max_cols, line_width=width, show_dimensions=show_dimensions) return buf.getvalue() def _repr_html_(self): """ Return a html representation for a particular DataFrame. Mainly for IPython notebook. """ if self._info_repr(): buf = StringIO("") self.info(buf=buf) # need to escape the <class>, should be the first line. val = buf.getvalue().replace('<', r'&lt;', 1) val = val.replace('>', r'&gt;', 1) return '<pre>' + val + '</pre>' if get_option("display.notebook_repr_html"): max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) else: return None @Substitution(header='Write out the column names. If a list of strings ' 'is given, it is assumed to be aliases for the ' 'column names', col_space_type='int', col_space='The minimum width of each column') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_string(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', line_width=None): """ Render a DataFrame to a console-friendly tabular output. %(shared_params)s line_width : int, optional Width to wrap a line in characters. %(returns)s See Also -------- to_html : Convert DataFrame to HTML. 
Examples -------- >>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]} >>> df = pd.DataFrame(d) >>> print(df.to_string()) col1 col2 0 1 4 1 2 5 2 3 6 """ formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, line_width=line_width) formatter.to_string() if buf is None: result = formatter.buf.getvalue() return result # ---------------------------------------------------------------------- @property def style(self): """ Property returning a Styler object containing methods for building a styled HTML representation fo the DataFrame. See Also -------- io.formats.style.Styler """ from pandas.io.formats.style import Styler return Styler(self) def iteritems(self): r""" Iterator over (column name, Series) pairs. Iterates over the DataFrame columns, returning a tuple with the column name and the content as a Series. Yields ------ label : object The column names for the DataFrame being iterated over. content : Series The column entries belonging to each label, as a Series. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.itertuples : Iterate over DataFrame rows as namedtuples of the values. Examples -------- >>> df = pd.DataFrame({'species': ['bear', 'bear', 'marsupial'], ... 'population': [1864, 22000, 80000]}, ... index=['panda', 'polar', 'koala']) >>> df species population panda bear 1864 polar bear 22000 koala marsupial 80000 >>> for label, content in df.iteritems(): ... print('label:', label) ... print('content:', content, sep='\n') ... label: species content: panda bear polar bear koala marsupial Name: species, dtype: object label: population content: panda 1864 polar 22000 koala 80000 Name: population, dtype: int64 """ if self.columns.is_unique and hasattr(self, '_item_cache'): for k in self.columns: yield k, self._get_item_cache(k) else: for i, k in enumerate(self.columns): yield k, self._ixs(i, axis=1) def iterrows(self): """ Iterate over DataFrame rows as (index, Series) pairs. Yields ------ index : label or tuple of label The index of the row. A tuple for a `MultiIndex`. data : Series The data of the row as a Series. it : generator A generator that iterates over the rows of the frame. See Also -------- itertuples : Iterate over DataFrame rows as namedtuples of the values. iteritems : Iterate over (column name, Series) pairs. Notes ----- 1. Because ``iterrows`` returns a Series for each row, it does **not** preserve dtypes across the rows (dtypes are preserved across columns for DataFrames). For example, >>> df = pd.DataFrame([[1, 1.5]], columns=['int', 'float']) >>> row = next(df.iterrows())[1] >>> row int 1.0 float 1.5 Name: 0, dtype: float64 >>> print(row['int'].dtype) float64 >>> print(df['int'].dtype) int64 To preserve dtypes while iterating over the rows, it is better to use :meth:`itertuples` which returns namedtuples of the values and which is generally faster than ``iterrows``. 2. You should **never modify** something you are iterating over. This is not guaranteed to work in all cases. Depending on the data types, the iterator returns a copy and not a view, and writing to it will have no effect. 
""" columns = self.columns klass = self._constructor_sliced for k, v in zip(self.index, self.values): s = klass(v, index=columns, name=k) yield k, s def itertuples(self, index=True, name="Pandas"): """ Iterate over DataFrame rows as namedtuples. Parameters ---------- index : bool, default True If True, return the index as the first element of the tuple. name : str or None, default "Pandas" The name of the returned namedtuples or None to return regular tuples. Returns ------- iterator An object to iterate over namedtuples for each row in the DataFrame with the first field possibly being the index and following fields being the column values. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.iteritems : Iterate over (column name, Series) pairs. Notes ----- The column names will be renamed to positional names if they are invalid Python identifiers, repeated, or start with an underscore. With a large number of columns (>255), regular tuples are returned. Examples -------- >>> df = pd.DataFrame({'num_legs': [4, 2], 'num_wings': [0, 2]}, ... index=['dog', 'hawk']) >>> df num_legs num_wings dog 4 0 hawk 2 2 >>> for row in df.itertuples(): ... print(row) ... Pandas(Index='dog', num_legs=4, num_wings=0) Pandas(Index='hawk', num_legs=2, num_wings=2) By setting the `index` parameter to False we can remove the index as the first element of the tuple: >>> for row in df.itertuples(index=False): ... print(row) ... Pandas(num_legs=4, num_wings=0) Pandas(num_legs=2, num_wings=2) With the `name` parameter set we set a custom name for the yielded namedtuples: >>> for row in df.itertuples(name='Animal'): ... print(row) ... Animal(Index='dog', num_legs=4, num_wings=0) Animal(Index='hawk', num_legs=2, num_wings=2) """ arrays = [] fields = list(self.columns) if index: arrays.append(self.index) fields.insert(0, "Index") # use integer indexing because of possible duplicate column names arrays.extend(self.iloc[:, k] for k in range(len(self.columns))) # Python 3 supports at most 255 arguments to constructor if name is not None and len(self.columns) + index < 256: itertuple = collections.namedtuple(name, fields, rename=True) return map(itertuple._make, zip(*arrays)) # fallback to regular tuples return zip(*arrays) items = iteritems def __len__(self): """ Returns length of info axis, but here we use the index. """ return len(self.index) def dot(self, other): """ Compute the matrix multiplication between the DataFrame and other. This method computes the matrix product between the DataFrame and the values of an other Series, DataFrame or a numpy array. It can also be called using ``self @ other`` in Python >= 3.5. Parameters ---------- other : Series, DataFrame or array-like The other object to compute the matrix product with. Returns ------- Series or DataFrame If other is a Series, return the matrix product between self and other as a Serie. If other is a DataFrame or a numpy.array, return the matrix product of self and other in a DataFrame of a np.array. See Also -------- Series.dot: Similar method for Series. Notes ----- The dimensions of DataFrame and other must be compatible in order to compute the matrix multiplication. In addition, the column names of DataFrame and the index of other must contain the same values, as they will be aligned prior to the multiplication. The dot method for Series computes the inner product, instead of the matrix product here. Examples -------- Here we multiply a DataFrame with a Series. 
>>> df = pd.DataFrame([[0, 1, -2, -1], [1, 1, 1, 1]]) >>> s = pd.Series([1, 1, 2, 1]) >>> df.dot(s) 0 -4 1 5 dtype: int64 Here we multiply a DataFrame with another DataFrame. >>> other = pd.DataFrame([[0, 1], [1, 2], [-1, -1], [2, 0]]) >>> df.dot(other) 0 1 0 1 4 1 2 2 Note that the dot method give the same result as @ >>> df @ other 0 1 0 1 4 1 2 2 The dot method works also if other is an np.array. >>> arr = np.array([[0, 1], [1, 2], [-1, -1], [2, 0]]) >>> df.dot(arr) 0 1 0 1 4 1 2 2 Note how shuffling of the objects does not change the result. >>> s2 = s.reindex([1, 0, 2, 3]) >>> df.dot(s2) 0 -4 1 5 dtype: int64 """ if isinstance(other, (Series, DataFrame)): common = self.columns.union(other.index) if (len(common) > len(self.columns) or len(common) > len(other.index)): raise ValueError('matrices are not aligned') left = self.reindex(columns=common, copy=False) right = other.reindex(index=common, copy=False) lvals = left.values rvals = right.values else: left = self lvals = self.values rvals = np.asarray(other) if lvals.shape[1] != rvals.shape[0]: raise ValueError('Dot product shape mismatch, ' '{s} vs {r}'.format(s=lvals.shape, r=rvals.shape)) if isinstance(other, DataFrame): return self._constructor(np.dot(lvals, rvals), index=left.index, columns=other.columns) elif isinstance(other, Series): return Series(np.dot(lvals, rvals), index=left.index) elif isinstance(rvals, (np.ndarray, Index)): result = np.dot(lvals, rvals) if result.ndim == 2: return self._constructor(result, index=left.index) else: return Series(result, index=left.index) else: # pragma: no cover raise TypeError('unsupported type: {oth}'.format(oth=type(other))) def __matmul__(self, other): """ Matrix multiplication using binary `@` operator in Python>=3.5. """ return self.dot(other) def __rmatmul__(self, other): """ Matrix multiplication using binary `@` operator in Python>=3.5. """ return self.T.dot(np.transpose(other)).T # ---------------------------------------------------------------------- # IO methods (to / from other formats) @classmethod def from_dict(cls, data, orient='columns', dtype=None, columns=None): """ Construct DataFrame from dict of array-like or dicts. Creates DataFrame object from dictionary by columns or by index allowing dtype specification. Parameters ---------- data : dict Of the form {field : array-like} or {field : dict}. orient : {'columns', 'index'}, default 'columns' The "orientation" of the data. If the keys of the passed dict should be the columns of the resulting DataFrame, pass 'columns' (default). Otherwise if the keys should be rows, pass 'index'. dtype : dtype, default None Data type to force, otherwise infer. columns : list, default None Column labels to use when ``orient='index'``. Raises a ValueError if used with ``orient='columns'``. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- DataFrame.from_records : DataFrame from ndarray (structured dtype), list of tuples, dict, or DataFrame. DataFrame : DataFrame object creation using constructor. 
Examples -------- By default the keys of the dict become the DataFrame columns: >>> data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']} >>> pd.DataFrame.from_dict(data) col_1 col_2 0 3 a 1 2 b 2 1 c 3 0 d Specify ``orient='index'`` to create the DataFrame using dictionary keys as rows: >>> data = {'row_1': [3, 2, 1, 0], 'row_2': ['a', 'b', 'c', 'd']} >>> pd.DataFrame.from_dict(data, orient='index') 0 1 2 3 row_1 3 2 1 0 row_2 a b c d When using the 'index' orientation, the column names can be specified manually: >>> pd.DataFrame.from_dict(data, orient='index', ... columns=['A', 'B', 'C', 'D']) A B C D row_1 3 2 1 0 row_2 a b c d """ index = None orient = orient.lower() if orient == 'index': if len(data) > 0: # TODO speed up Series case if isinstance(list(data.values())[0], (Series, dict)): data = _from_nested_dict(data) else: data, index = list(data.values()), list(data.keys()) elif orient == 'columns': if columns is not None: raise ValueError("cannot use columns parameter with " "orient='columns'") else: # pragma: no cover raise ValueError('only recognize index or columns for orient') return cls(data, index=index, columns=columns, dtype=dtype) def to_numpy(self, dtype=None, copy=False): """ Convert the DataFrame to a NumPy array. .. versionadded:: 0.24.0 By default, the dtype of the returned array will be the common NumPy dtype of all types in the DataFrame. For example, if the dtypes are ``float16`` and ``float32``, the results dtype will be ``float32``. This may require copying data and coercing values, which may be expensive. Parameters ---------- dtype : str or numpy.dtype, optional The dtype to pass to :meth:`numpy.asarray` copy : bool, default False Whether to ensure that the returned value is a not a view on another array. Note that ``copy=False`` does not *ensure* that ``to_numpy()`` is no-copy. Rather, ``copy=True`` ensure that a copy is made, even if not strictly necessary. Returns ------- numpy.ndarray See Also -------- Series.to_numpy : Similar method for Series. Examples -------- >>> pd.DataFrame({"A": [1, 2], "B": [3, 4]}).to_numpy() array([[1, 3], [2, 4]]) With heterogenous data, the lowest common type will have to be used. >>> df = pd.DataFrame({"A": [1, 2], "B": [3.0, 4.5]}) >>> df.to_numpy() array([[1. , 3. ], [2. , 4.5]]) For a mix of numeric and non-numeric types, the output array will have object dtype. >>> df['C'] = pd.date_range('2000', periods=2) >>> df.to_numpy() array([[1, 3.0, Timestamp('2000-01-01 00:00:00')], [2, 4.5, Timestamp('2000-01-02 00:00:00')]], dtype=object) """ result = np.array(self.values, dtype=dtype, copy=copy) return result def to_dict(self, orient='dict', into=dict): """ Convert the DataFrame to a dictionary. The type of the key-value pairs can be customized with the parameters (see below). Parameters ---------- orient : str {'dict', 'list', 'series', 'split', 'records', 'index'} Determines the type of the values of the dictionary. - 'dict' (default) : dict like {column -> {index -> value}} - 'list' : dict like {column -> [values]} - 'series' : dict like {column -> Series(values)} - 'split' : dict like {'index' -> [index], 'columns' -> [columns], 'data' -> [values]} - 'records' : list like [{column -> value}, ... , {column -> value}] - 'index' : dict like {index -> {column -> value}} Abbreviations are allowed. `s` indicates `series` and `sp` indicates `split`. into : class, default dict The collections.abc.Mapping subclass used for all Mappings in the return value. 
Can be the actual class or an empty instance of the mapping type you want. If you want a collections.defaultdict, you must pass it initialized. .. versionadded:: 0.21.0 Returns ------- dict, list or collections.abc.Mapping Return a collections.abc.Mapping object representing the DataFrame. The resulting transformation depends on the `orient` parameter. See Also -------- DataFrame.from_dict: Create a DataFrame from a dictionary. DataFrame.to_json: Convert a DataFrame to JSON format. Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], ... 'col2': [0.5, 0.75]}, ... index=['row1', 'row2']) >>> df col1 col2 row1 1 0.50 row2 2 0.75 >>> df.to_dict() {'col1': {'row1': 1, 'row2': 2}, 'col2': {'row1': 0.5, 'row2': 0.75}} You can specify the return orientation. >>> df.to_dict('series') {'col1': row1 1 row2 2 Name: col1, dtype: int64, 'col2': row1 0.50 row2 0.75 Name: col2, dtype: float64} >>> df.to_dict('split') {'index': ['row1', 'row2'], 'columns': ['col1', 'col2'], 'data': [[1, 0.5], [2, 0.75]]} >>> df.to_dict('records') [{'col1': 1, 'col2': 0.5}, {'col1': 2, 'col2': 0.75}] >>> df.to_dict('index') {'row1': {'col1': 1, 'col2': 0.5}, 'row2': {'col1': 2, 'col2': 0.75}} You can also specify the mapping type. >>> from collections import OrderedDict, defaultdict >>> df.to_dict(into=OrderedDict) OrderedDict([('col1', OrderedDict([('row1', 1), ('row2', 2)])), ('col2', OrderedDict([('row1', 0.5), ('row2', 0.75)]))]) If you want a `defaultdict`, you need to initialize it: >>> dd = defaultdict(list) >>> df.to_dict('records', into=dd) [defaultdict(<class 'list'>, {'col1': 1, 'col2': 0.5}), defaultdict(<class 'list'>, {'col1': 2, 'col2': 0.75})] """ if not self.columns.is_unique: warnings.warn("DataFrame columns are not unique, some " "columns will be omitted.", UserWarning, stacklevel=2) # GH16122 into_c = com.standardize_mapping(into) if orient.lower().startswith('d'): return into_c( (k, v.to_dict(into)) for k, v in self.items()) elif orient.lower().startswith('l'): return into_c((k, v.tolist()) for k, v in self.items()) elif orient.lower().startswith('sp'): return into_c((('index', self.index.tolist()), ('columns', self.columns.tolist()), ('data', [ list(map(com.maybe_box_datetimelike, t)) for t in self.itertuples(index=False, name=None) ]))) elif orient.lower().startswith('s'): return into_c((k, com.maybe_box_datetimelike(v)) for k, v in self.items()) elif orient.lower().startswith('r'): columns = self.columns.tolist() rows = (dict(zip(columns, row)) for row in self.itertuples(index=False, name=None)) return [ into_c((k, com.maybe_box_datetimelike(v)) for k, v in row.items()) for row in rows] elif orient.lower().startswith('i'): if not self.index.is_unique: raise ValueError( "DataFrame index must be unique for orient='index'." ) return into_c((t[0], dict(zip(self.columns, t[1:]))) for t in self.itertuples(name=None)) else: raise ValueError("orient '{o}' not understood".format(o=orient)) def to_gbq(self, destination_table, project_id=None, chunksize=None, reauth=False, if_exists='fail', auth_local_webserver=False, table_schema=None, location=None, progress_bar=True, credentials=None, verbose=None, private_key=None): """ Write a DataFrame to a Google BigQuery table. This function requires the `pandas-gbq package <https://pandas-gbq.readthedocs.io>`__. See the `How to authenticate with Google BigQuery <https://pandas-gbq.readthedocs.io/en/latest/howto/authentication.html>`__ guide for authentication instructions. 
        Parameters
        ----------
        destination_table : str
            Name of table to be written, in the form ``dataset.tablename``.
        project_id : str, optional
            Google BigQuery Account project ID. Optional when available from
            the environment.
        chunksize : int, optional
            Number of rows to be inserted in each chunk from the dataframe.
            Set to ``None`` to load the whole dataframe at once.
        reauth : bool, default False
            Force Google BigQuery to re-authenticate the user. This is useful
            if multiple accounts are used.
        if_exists : str, default 'fail'
            Behavior when the destination table exists. Value can be one of:

            ``'fail'``
                If table exists, do nothing.
            ``'replace'``
                If table exists, drop it, recreate it, and insert data.
            ``'append'``
                If table exists, insert data. Create if does not exist.
        auth_local_webserver : bool, default False
            Use the `local webserver flow`_ instead of the `console flow`_
            when getting user credentials.

            .. _local webserver flow:
                http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server
            .. _console flow:
                http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console

            *New in version 0.2.0 of pandas-gbq*.
        table_schema : list of dicts, optional
            List of BigQuery table fields to which the DataFrame columns
            conform, e.g. ``[{'name': 'col1', 'type': 'STRING'},...]``. If a
            schema is not provided, it will be generated according to the
            dtypes of the DataFrame columns. See the BigQuery API
            documentation for the available field names.

            *New in version 0.3.1 of pandas-gbq*.
        location : str, optional
            Location where the load job should run. See the `BigQuery
            locations documentation
            <https://cloud.google.com/bigquery/docs/dataset-locations>`__ for
            a list of available locations. The location must match that of
            the target dataset.

            *New in version 0.5.0 of pandas-gbq*.
        progress_bar : bool, default True
            Use the library `tqdm` to show the progress bar for the upload,
            chunk by chunk.

            *New in version 0.5.0 of pandas-gbq*.
        credentials : google.auth.credentials.Credentials, optional
            Credentials for accessing Google APIs. Use this parameter to
            override default credentials, such as to use Compute Engine
            :class:`google.auth.compute_engine.Credentials` or Service
            Account :class:`google.oauth2.service_account.Credentials`
            directly.

            *New in version 0.8.0 of pandas-gbq*.

            .. versionadded:: 0.24.0
        verbose : bool, deprecated
            Deprecated in pandas-gbq version 0.4.0. Use the `logging module
            to adjust verbosity instead
            <https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__.
        private_key : str, deprecated
            Deprecated in pandas-gbq version 0.8.0. Use the ``credentials``
            parameter and
            :func:`google.oauth2.service_account.Credentials.from_service_account_info`
            or
            :func:`google.oauth2.service_account.Credentials.from_service_account_file`
            instead.

            Service account private key in JSON format. Can be file path
            or string contents. This is useful for remote server
            authentication (e.g. a Jupyter/IPython notebook on a remote
            host).

        See Also
        --------
        pandas_gbq.to_gbq : This function in the pandas-gbq library.
        read_gbq : Read a DataFrame from Google BigQuery.
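
        Examples
        --------
        A minimal sketch (the project and table names below are hypothetical,
        and the call is skipped because it requires live BigQuery
        credentials):

        >>> df = pd.DataFrame({'num': [1, 2], 'word': ['a', 'b']})
        >>> df.to_gbq('my_dataset.my_table',
        ...           project_id='my-project',
        ...           if_exists='fail')  # doctest: +SKIP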
""" from pandas.io import gbq gbq.to_gbq(self, destination_table, project_id=project_id, chunksize=chunksize, reauth=reauth, if_exists=if_exists, auth_local_webserver=auth_local_webserver, table_schema=table_schema, location=location, progress_bar=progress_bar, credentials=credentials, verbose=verbose, private_key=private_key) @classmethod def from_records(cls, data, index=None, exclude=None, columns=None, coerce_float=False, nrows=None): """ Convert structured or record ndarray to DataFrame. Parameters ---------- data : ndarray (structured dtype), list of tuples, dict, or DataFrame index : string, list of fields, array-like Field of array to use as the index, alternately a specific set of input labels to use exclude : sequence, default None Columns or fields to exclude columns : sequence, default None Column names to use. If the passed data do not have names associated with them, this argument provides names for the columns. Otherwise this argument indicates the order of the columns in the result (any names not found in the data will become all-NA columns) coerce_float : boolean, default False Attempt to convert values of non-string, non-numeric objects (like decimal.Decimal) to floating point, useful for SQL result sets nrows : int, default None Number of rows to read if data is an iterator Returns ------- DataFrame """ # Make a copy of the input columns so we can modify it if columns is not None: columns = ensure_index(columns) if is_iterator(data): if nrows == 0: return cls() try: first_row = next(data) except StopIteration: return cls(index=index, columns=columns) dtype = None if hasattr(first_row, 'dtype') and first_row.dtype.names: dtype = first_row.dtype values = [first_row] if nrows is None: values += data else: values.extend(itertools.islice(data, nrows - 1)) if dtype is not None: data = np.array(values, dtype=dtype) else: data = values if isinstance(data, dict): if columns is None: columns = arr_columns = ensure_index(sorted(data)) arrays = [data[k] for k in columns] else: arrays = [] arr_columns = [] for k, v in data.items(): if k in columns: arr_columns.append(k) arrays.append(v) arrays, arr_columns = reorder_arrays(arrays, arr_columns, columns) elif isinstance(data, (np.ndarray, DataFrame)): arrays, columns = to_arrays(data, columns) if columns is not None: columns = ensure_index(columns) arr_columns = columns else: arrays, arr_columns = to_arrays(data, columns, coerce_float=coerce_float) arr_columns = ensure_index(arr_columns) if columns is not None: columns = ensure_index(columns) else: columns = arr_columns if exclude is None: exclude = set() else: exclude = set(exclude) result_index = None if index is not None: if (isinstance(index, str) or not hasattr(index, "__iter__")): i = columns.get_loc(index) exclude.add(index) if len(arrays) > 0: result_index = Index(arrays[i], name=index) else: result_index = Index([], name=index) else: try: index_data = [arrays[arr_columns.get_loc(field)] for field in index] result_index = ensure_index_from_sequences(index_data, names=index) exclude.update(index) except Exception: result_index = index if any(exclude): arr_exclude = [x for x in exclude if x in arr_columns] to_remove = [arr_columns.get_loc(col) for col in arr_exclude] arrays = [v for i, v in enumerate(arrays) if i not in to_remove] arr_columns = arr_columns.drop(arr_exclude) columns = columns.drop(exclude) mgr = arrays_to_mgr(arrays, arr_columns, result_index, columns) return cls(mgr) def to_records(self, index=True, convert_datetime64=None, column_dtypes=None, index_dtypes=None): 
""" Convert DataFrame to a NumPy record array. Index will be included as the first field of the record array if requested. Parameters ---------- index : bool, default True Include index in resulting record array, stored in 'index' field or using the index label, if set. convert_datetime64 : bool, default None .. deprecated:: 0.23.0 Whether to convert the index to datetime.datetime if it is a DatetimeIndex. column_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all columns. If a dictionary, a mapping of column names and indices (zero-indexed) to specific data types. index_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all index levels. If a dictionary, a mapping of index level names and indices (zero-indexed) to specific data types. This mapping is applied only if `index=True`. Returns ------- numpy.recarray NumPy ndarray with the DataFrame labels as fields and each row of the DataFrame as entries. See Also -------- DataFrame.from_records: Convert structured or record ndarray to DataFrame. numpy.recarray: An ndarray that allows field access using attributes, analogous to typed columns in a spreadsheet. Examples -------- >>> df = pd.DataFrame({'A': [1, 2], 'B': [0.5, 0.75]}, ... index=['a', 'b']) >>> df A B a 1 0.50 b 2 0.75 >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('index', 'O'), ('A', '<i8'), ('B', '<f8')]) If the DataFrame index has no label then the recarray field name is set to 'index'. If the index has a label then this is used as the field name: >>> df.index = df.index.rename("I") >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i8'), ('B', '<f8')]) The index can be excluded from the record array: >>> df.to_records(index=False) rec.array([(1, 0.5 ), (2, 0.75)], dtype=[('A', '<i8'), ('B', '<f8')]) Data types can be specified for the columns: >>> df.to_records(column_dtypes={"A": "int32"}) rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i4'), ('B', '<f8')]) As well as for the index: >>> df.to_records(index_dtypes="<S2") rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S2'), ('A', '<i8'), ('B', '<f8')]) >>> index_dtypes = "<S{}".format(df.index.str.len().max()) >>> df.to_records(index_dtypes=index_dtypes) rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S1'), ('A', '<i8'), ('B', '<f8')]) """ if convert_datetime64 is not None: warnings.warn("The 'convert_datetime64' parameter is " "deprecated and will be removed in a future " "version", FutureWarning, stacklevel=2) if index: if is_datetime64_any_dtype(self.index) and convert_datetime64: ix_vals = [self.index.to_pydatetime()] else: if isinstance(self.index, MultiIndex): # array of tuples to numpy cols. 
                    ix_vals = list(map(np.array, zip(*self.index.values)))
                else:
                    ix_vals = [self.index.values]

            arrays = ix_vals + [self[c].get_values() for c in self.columns]

            count = 0
            index_names = list(self.index.names)

            if isinstance(self.index, MultiIndex):
                for i, n in enumerate(index_names):
                    if n is None:
                        index_names[i] = 'level_%d' % count
                        count += 1
            elif index_names[0] is None:
                index_names = ['index']

            names = [str(name) for name in itertools.chain(index_names,
                                                           self.columns)]
        else:
            arrays = [self[c].get_values() for c in self.columns]
            names = [str(c) for c in self.columns]
            index_names = []

        index_len = len(index_names)
        formats = []

        for i, v in enumerate(arrays):
            index = i

            # When the names and arrays are collected, we
            # first collect those in the DataFrame's index,
            # followed by those in its columns.
            #
            # Thus, the total length of the array is:
            # len(index_names) + len(DataFrame.columns).
            #
            # This check allows us to see whether we are
            # handling a name / array in the index or column.
            if index < index_len:
                dtype_mapping = index_dtypes
                name = index_names[index]
            else:
                index -= index_len
                dtype_mapping = column_dtypes
                name = self.columns[index]

            # We have a dictionary, so we get the data type
            # associated with the index or column (which can
            # be denoted by its name in the DataFrame or its
            # position in the DataFrame's array of indices or
            # columns, whichever is applicable).
            if is_dict_like(dtype_mapping):
                if name in dtype_mapping:
                    dtype_mapping = dtype_mapping[name]
                elif index in dtype_mapping:
                    dtype_mapping = dtype_mapping[index]
                else:
                    dtype_mapping = None

            # If no mapping can be found, use the array's
            # dtype attribute for formatting.
            #
            # A valid dtype must either be a type or
            # string naming a type.
            if dtype_mapping is None:
                formats.append(v.dtype)
            elif isinstance(dtype_mapping, (type, np.dtype, str)):
                formats.append(dtype_mapping)
            else:
                element = "row" if i < index_len else "column"
                msg = ("Invalid dtype {dtype} specified for "
                       "{element} {name}").format(dtype=dtype_mapping,
                                                  element=element,
                                                  name=name)
                raise ValueError(msg)

        return np.rec.fromarrays(
            arrays,
            dtype={'names': names, 'formats': formats}
        )

    @classmethod
    def from_items(cls, items, columns=None, orient='columns'):
        """
        Construct a DataFrame from a list of tuples.

        .. deprecated:: 0.23.0
            `from_items` is deprecated and will be removed in a future
            version. Use :meth:`DataFrame.from_dict(dict(items))
            <DataFrame.from_dict>` instead.
            :meth:`DataFrame.from_dict(OrderedDict(items))
            <DataFrame.from_dict>` may be used to preserve the key order.

        Convert (key, value) pairs to DataFrame. The keys will be the axis
        index (usually the columns, but depends on the specified
        orientation). The values should be arrays or Series.

        Parameters
        ----------
        items : sequence of (key, value) pairs
            Values should be arrays or Series.
        columns : sequence of column labels, optional
            Must be passed if orient='index'.
        orient : {'columns', 'index'}, default 'columns'
            The "orientation" of the data. If the keys of the
            input correspond to column labels, pass 'columns'
            (default). Otherwise if the keys correspond to the index,
            pass 'index'.

        Returns
        -------
        DataFrame
        """
        warnings.warn("from_items is deprecated. Please use "
                      "DataFrame.from_dict(dict(items), ...) instead. "
                      "DataFrame.from_dict(OrderedDict(items)) may be used "
                      "to preserve the key order.",
                      FutureWarning, stacklevel=2)

        keys, values = zip(*items)

        if orient == 'columns':
            if columns is not None:
                columns = ensure_index(columns)

                idict = dict(items)
                if len(idict) < len(items):
                    if not columns.equals(ensure_index(keys)):
                        raise ValueError('With non-unique item names, passed '
                                         'columns must be identical')
                    arrays = values
                else:
                    arrays = [idict[k] for k in columns if k in idict]
            else:
                columns = ensure_index(keys)
                arrays = values

            # GH 17312
            # Provide more informative error msg when scalar values passed
            try:
                return cls._from_arrays(arrays, columns, None)

            except ValueError:
                if not is_nested_list_like(values):
                    raise ValueError('The value in each (key, value) pair '
                                     'must be an array, Series, or dict')

        elif orient == 'index':
            if columns is None:
                raise TypeError("Must pass columns with orient='index'")

            keys = ensure_index(keys)

            # GH 17312
            # Provide more informative error msg when scalar values passed
            try:
                arr = np.array(values, dtype=object).T
                data = [lib.maybe_convert_objects(v) for v in arr]
                return cls._from_arrays(data, columns, keys)

            except TypeError:
                if not is_nested_list_like(values):
                    raise ValueError('The value in each (key, value) pair '
                                     'must be an array, Series, or dict')

        else:  # pragma: no cover
            raise ValueError("'orient' must be either 'columns' or 'index'")

    @classmethod
    def _from_arrays(cls, arrays, columns, index, dtype=None):
        mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype)
        return cls(mgr)

    @classmethod
    def from_csv(cls, path, header=0, sep=',', index_col=0, parse_dates=True,
                 encoding=None, tupleize_cols=None,
                 infer_datetime_format=False):
        """
        Read CSV file.

        .. deprecated:: 0.21.0
            Use :func:`read_csv` instead.

        It is preferable to use the more powerful :func:`read_csv`
        for most general purposes, but ``from_csv`` makes for an easy
        roundtrip to and from a file (the exact counterpart of
        ``to_csv``), especially with a DataFrame of time series data.

        This method only differs from the preferred :func:`read_csv`
        in some defaults:

        - `index_col` is ``0`` instead of ``None`` (take first column as index
          by default)
        - `parse_dates` is ``True`` instead of ``False`` (try parsing the index
          as datetime by default)

        So a ``pd.DataFrame.from_csv(path)`` can be replaced by
        ``pd.read_csv(path, index_col=0, parse_dates=True)``.

        Parameters
        ----------
        path : string file path or file handle / StringIO
        header : int, default 0
            Row to use as header (skip prior rows)
        sep : string, default ','
            Field delimiter
        index_col : int or sequence, default 0
            Column to use for index. If a sequence is given, a MultiIndex
            is used. Different default from read_table
        parse_dates : boolean, default True
            Parse dates. Different default from read_table
        tupleize_cols : boolean, default None
            Write MultiIndex columns as a list of tuples (if True) or in
            the new, expanded format (if False)
        infer_datetime_format : boolean, default False
            If True and `parse_dates` is True for a column, try to infer the
            datetime format based on the first datetime string. If the format
            can be inferred, there often will be a large parsing speed-up.

        Returns
        -------
        DataFrame

        See Also
        --------
        read_csv
        """
        warnings.warn("from_csv is deprecated. Please use read_csv(...) "
                      "instead. Note that some of the default arguments are "
                      "different, so please refer to the documentation "
                      "for from_csv when changing your function calls",
                      FutureWarning, stacklevel=2)
        from pandas.io.parsers import read_csv
        return read_csv(path, header=header, sep=sep,
                        parse_dates=parse_dates, index_col=index_col,
                        encoding=encoding, tupleize_cols=tupleize_cols,
                        infer_datetime_format=infer_datetime_format)

    def to_sparse(self, fill_value=None, kind='block'):
        """
        Convert to SparseDataFrame.

        .. deprecated:: 0.25.0

        Implement the sparse version of the DataFrame, meaning that any data
        matching a specific value is omitted in the representation.
        The sparse DataFrame allows for more efficient storage.

        Parameters
        ----------
        fill_value : float, default None
            The specific value that should be omitted in the representation.
        kind : {'block', 'integer'}, default 'block'
            The kind of the SparseIndex tracking where data is not equal to
            the fill value:

            - 'block' tracks only the locations and sizes of blocks of data.
            - 'integer' keeps an array with all the locations of the data.

            In most cases 'block' is recommended, since it's more memory
            efficient.

        Returns
        -------
        SparseDataFrame
            The sparse representation of the DataFrame.

        See Also
        --------
        DataFrame.to_dense : Converts the DataFrame back to its dense form.

        Examples
        --------
        >>> df = pd.DataFrame([(np.nan, np.nan),
        ...                    (1., np.nan),
        ...                    (np.nan, 1.)])
        >>> df
             0    1
        0  NaN  NaN
        1  1.0  NaN
        2  NaN  1.0
        >>> type(df)
        <class 'pandas.core.frame.DataFrame'>

        >>> sdf = df.to_sparse()  # doctest: +SKIP
        >>> sdf  # doctest: +SKIP
             0    1
        0  NaN  NaN
        1  1.0  NaN
        2  NaN  1.0
        >>> type(sdf)  # doctest: +SKIP
        <class 'pandas.core.sparse.frame.SparseDataFrame'>
        """
        warnings.warn("DataFrame.to_sparse is deprecated and will be removed "
                      "in a future version", FutureWarning, stacklevel=2)

        from pandas.core.sparse.api import SparseDataFrame
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", message="SparseDataFrame")
            return SparseDataFrame(self._series, index=self.index,
                                   columns=self.columns, default_kind=kind,
                                   default_fill_value=fill_value)

    @deprecate_kwarg(old_arg_name='encoding', new_arg_name=None)
    def to_stata(self, fname, convert_dates=None, write_index=True,
                 encoding="latin-1", byteorder=None, time_stamp=None,
                 data_label=None, variable_labels=None, version=114,
                 convert_strl=None):
        """
        Export DataFrame object to Stata dta format.

        Writes the DataFrame to a Stata dataset file.
        "dta" files contain a Stata dataset.

        Parameters
        ----------
        fname : str, buffer or path object
            String, path object (pathlib.Path or py._path.local.LocalPath) or
            object implementing a binary write() function. If using a buffer
            then the buffer will not be automatically closed after the file
            data has been written.
        convert_dates : dict
            Dictionary mapping columns containing datetime types to Stata
            internal format to use when writing the dates. Options are 'tc',
            'td', 'tm', 'tw', 'th', 'tq', 'ty'. Column can be either an
            integer or a name. Datetime columns that do not have a conversion
            type specified will be converted to 'tc'. Raises
            NotImplementedError if a datetime column has timezone
            information.
        write_index : bool
            Write the index to Stata dataset.
        encoding : str
            Default is latin-1. Unicode is not supported.
        byteorder : str
            Can be ">", "<", "little", or "big". Default is `sys.byteorder`.
        time_stamp : datetime
            A datetime to use as file creation date. Default is the current
            time.
        data_label : str, optional
            A label for the data set. Must be 80 characters or smaller.
        variable_labels : dict
            Dictionary containing columns as keys and variable labels as
            values. Each label must be 80 characters or smaller.

            .. versionadded:: 0.19.0

        version : {114, 117}, default 114
            Version to use in the output dta file. Version 114 can be read by
            Stata 10 and later. Version 117 can be read by Stata 13 or later.
            Version 114 limits string variables to 244 characters or fewer
            while 117 allows strings with lengths up to 2,000,000 characters.

            .. versionadded:: 0.23.0

        convert_strl : list, optional
            List of column names to convert to Stata StrL format. Only
            available if version is 117. Storing strings in the StrL format
            can produce smaller dta files if strings have more than
            8 characters and values are repeated.

            .. versionadded:: 0.23.0

        Raises
        ------
        NotImplementedError
            * If datetimes contain timezone information
            * Column dtype is not representable in Stata
        ValueError
            * Columns listed in convert_dates are neither datetime64[ns]
              nor datetime.datetime
            * Column listed in convert_dates is not in DataFrame
            * Categorical label contains more than 32,000 characters

        .. versionadded:: 0.19.0

        See Also
        --------
        read_stata : Import Stata data files.
        io.stata.StataWriter : Low-level writer for Stata data files.
        io.stata.StataWriter117 : Low-level writer for version 117 files.

        Examples
        --------
        >>> df = pd.DataFrame({'animal': ['falcon', 'parrot', 'falcon',
        ...                               'parrot'],
        ...                    'speed': [350, 18, 361, 15]})
        >>> df.to_stata('animals.dta')  # doctest: +SKIP
        """
        kwargs = {}
        if version not in (114, 117):
            raise ValueError('Only formats 114 and 117 supported.')
        if version == 114:
            if convert_strl is not None:
                raise ValueError('strl support is only available when using '
                                 'format 117')
            from pandas.io.stata import StataWriter as statawriter
        else:
            from pandas.io.stata import StataWriter117 as statawriter
            kwargs['convert_strl'] = convert_strl

        writer = statawriter(fname, self, convert_dates=convert_dates,
                             byteorder=byteorder, time_stamp=time_stamp,
                             data_label=data_label, write_index=write_index,
                             variable_labels=variable_labels, **kwargs)
        writer.write_file()

    def to_feather(self, fname):
        """
        Write out the binary feather-format for DataFrames.

        .. versionadded:: 0.20.0

        Parameters
        ----------
        fname : str
            String file path.
        """
        from pandas.io.feather_format import to_feather
        to_feather(self, fname)

    def to_parquet(self, fname, engine='auto', compression='snappy',
                   index=None, partition_cols=None, **kwargs):
        """
        Write a DataFrame to the binary parquet format.

        .. versionadded:: 0.21.0

        This function writes the dataframe as a `parquet file
        <https://parquet.apache.org/>`_. You can choose different parquet
        backends, and have the option of compression. See
        :ref:`the user guide <io.parquet>` for more details.

        Parameters
        ----------
        fname : str
            File path or Root Directory path. Will be used as Root Directory
            path while writing a partitioned dataset.

            .. versionchanged:: 0.24.0

        engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto'
            Parquet library to use. If 'auto', then the option
            ``io.parquet.engine`` is used. The default ``io.parquet.engine``
            behavior is to try 'pyarrow', falling back to 'fastparquet' if
            'pyarrow' is unavailable.
        compression : {'snappy', 'gzip', 'brotli', None}, default 'snappy'
            Name of the compression to use. Use ``None`` for no compression.
        index : bool, default None
            If ``True``, include the dataframe's index(es) in the file
            output. If ``False``, they will not be written to the file. If
            ``None``, the behavior depends on the chosen engine.

            ..
versionadded:: 0.24.0 partition_cols : list, optional, default None Column names by which to partition the dataset Columns are partitioned in the order they are given .. versionadded:: 0.24.0 **kwargs Additional arguments passed to the parquet library. See :ref:`pandas io <io.parquet>` for more details. See Also -------- read_parquet : Read a parquet file. DataFrame.to_csv : Write a csv file. DataFrame.to_sql : Write to a sql table. DataFrame.to_hdf : Write to hdf. Notes ----- This function requires either the `fastparquet <https://pypi.org/project/fastparquet>`_ or `pyarrow <https://arrow.apache.org/docs/python/>`_ library. Examples -------- >>> df = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]}) >>> df.to_parquet('df.parquet.gzip', ... compression='gzip') # doctest: +SKIP >>> pd.read_parquet('df.parquet.gzip') # doctest: +SKIP col1 col2 0 1 3 1 2 4 """ from pandas.io.parquet import to_parquet to_parquet(self, fname, engine, compression=compression, index=index, partition_cols=partition_cols, **kwargs) @Substitution(header='Whether to print column labels, default True', col_space_type='str or int', col_space='The minimum width of each column in CSS length ' 'units. An int is assumed to be px units.\n\n' ' .. versionadded:: 0.25.0\n' ' Ability to use str') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_html(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', bold_rows=True, classes=None, escape=True, notebook=False, border=None, table_id=None, render_links=False): """ Render a DataFrame as an HTML table. %(shared_params)s bold_rows : bool, default True Make the row labels bold in the output. classes : str or list or tuple, default None CSS class(es) to apply to the resulting html table. escape : bool, default True Convert the characters <, >, and & to HTML-safe sequences. notebook : {True, False}, default False Whether the generated HTML is for IPython Notebook. border : int A ``border=border`` attribute is included in the opening `<table>` tag. Default ``pd.options.display.html.border``. .. versionadded:: 0.19.0 table_id : str, optional A css id is included in the opening `<table>` tag if specified. .. versionadded:: 0.23.0 render_links : bool, default False Convert URLs to HTML links. .. versionadded:: 0.24.0 %(returns)s See Also -------- to_string : Convert DataFrame to a string. """ if (justify is not None and justify not in fmt._VALID_JUSTIFY_PARAMETERS): raise ValueError("Invalid value for justify parameter") formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, bold_rows=bold_rows, escape=escape, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, table_id=table_id, render_links=render_links) # TODO: a generic formatter wld b in DataFrameFormatter formatter.to_html(classes=classes, notebook=notebook, border=border) if buf is None: return formatter.buf.getvalue() # ---------------------------------------------------------------------- def info(self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None): """ Print a concise summary of a DataFrame. 
        This method prints information about a DataFrame including
        the index dtype and column dtypes, non-null values and memory usage.

        Parameters
        ----------
        verbose : bool, optional
            Whether to print the full summary. By default, the setting in
            ``pandas.options.display.max_info_columns`` is followed.
        buf : writable buffer, defaults to sys.stdout
            Where to send the output. By default, the output is printed to
            sys.stdout. Pass a writable buffer if you need to further process
            the output.
        max_cols : int, optional
            When to switch from the verbose to the truncated output. If the
            DataFrame has more than `max_cols` columns, the truncated output
            is used. By default, the setting in
            ``pandas.options.display.max_info_columns`` is used.
        memory_usage : bool, str, optional
            Specifies whether total memory usage of the DataFrame
            elements (including the index) should be displayed. By default,
            this follows the ``pandas.options.display.memory_usage`` setting.

            True always shows memory usage. False never shows memory usage.
            A value of 'deep' is equivalent to "True with deep introspection".
            Memory usage is shown in human-readable units (base-2
            representation). Without deep introspection a memory estimation
            is made based on column dtype and number of rows assuming values
            consume the same memory amount for corresponding dtypes. With
            deep memory introspection, a real memory usage calculation is
            performed at the cost of computational resources.
        null_counts : bool, optional
            Whether to show the non-null counts. By default, this is shown
            only if the frame is smaller than
            ``pandas.options.display.max_info_rows`` and
            ``pandas.options.display.max_info_columns``. A value of True
            always shows the counts, and False never shows the counts.

        Returns
        -------
        None
            This method prints a summary of a DataFrame and returns None.

        See Also
        --------
        DataFrame.describe: Generate descriptive statistics of DataFrame
            columns.
        DataFrame.memory_usage: Memory usage of DataFrame columns.

        Examples
        --------
        >>> int_values = [1, 2, 3, 4, 5]
        >>> text_values = ['alpha', 'beta', 'gamma', 'delta', 'epsilon']
        >>> float_values = [0.0, 0.25, 0.5, 0.75, 1.0]
        >>> df = pd.DataFrame({"int_col": int_values, "text_col": text_values,
        ...                    "float_col": float_values})
        >>> df
           int_col text_col  float_col
        0        1    alpha       0.00
        1        2     beta       0.25
        2        3    gamma       0.50
        3        4    delta       0.75
        4        5  epsilon       1.00

        Prints information about all columns:

        >>> df.info(verbose=True)
        <class 'pandas.core.frame.DataFrame'>
        RangeIndex: 5 entries, 0 to 4
        Data columns (total 3 columns):
        int_col      5 non-null int64
        text_col     5 non-null object
        float_col    5 non-null float64
        dtypes: float64(1), int64(1), object(1)
        memory usage: 248.0+ bytes

        Prints a summary of the column count and dtypes, but no per-column
        information:

        >>> df.info(verbose=False)
        <class 'pandas.core.frame.DataFrame'>
        RangeIndex: 5 entries, 0 to 4
        Columns: 3 entries, int_col to float_col
        dtypes: float64(1), int64(1), object(1)
        memory usage: 248.0+ bytes

        Pipe the output of DataFrame.info to a buffer instead of sys.stdout,
        get the buffer content, and write it to a text file:

        >>> import io
        >>> buffer = io.StringIO()
        >>> df.info(buf=buffer)
        >>> s = buffer.getvalue()
        >>> with open("df_info.txt", "w",
        ...           encoding="utf-8") as f:  # doctest: +SKIP
        ...     f.write(s)
        260

        The `memory_usage` parameter allows deep introspection mode,
        especially useful for big DataFrames when fine-tuning memory
        optimization:

        >>> random_strings_array = np.random.choice(['a', 'b', 'c'], 10 ** 6)
        >>> df = pd.DataFrame({
        ...     'column_1': np.random.choice(['a', 'b', 'c'], 10 ** 6),
        ...
'column_2': np.random.choice(['a', 'b', 'c'], 10 ** 6), ... 'column_3': np.random.choice(['a', 'b', 'c'], 10 ** 6) ... }) >>> df.info() <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 22.9+ MB >>> df.info(memory_usage='deep') <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 188.8 MB """ if buf is None: # pragma: no cover buf = sys.stdout lines = [] lines.append(str(type(self))) lines.append(self.index._summary()) if len(self.columns) == 0: lines.append('Empty {name}'.format(name=type(self).__name__)) fmt.buffer_put_lines(buf, lines) return cols = self.columns # hack if max_cols is None: max_cols = get_option('display.max_info_columns', len(self.columns) + 1) max_rows = get_option('display.max_info_rows', len(self) + 1) if null_counts is None: show_counts = ((len(self.columns) <= max_cols) and (len(self) < max_rows)) else: show_counts = null_counts exceeds_info_cols = len(self.columns) > max_cols def _verbose_repr(): lines.append('Data columns (total %d columns):' % len(self.columns)) space = max(len(pprint_thing(k)) for k in self.columns) + 4 counts = None tmpl = "{count}{dtype}" if show_counts: counts = self.count() if len(cols) != len(counts): # pragma: no cover raise AssertionError( 'Columns must equal counts ' '({cols:d} != {counts:d})'.format( cols=len(cols), counts=len(counts))) tmpl = "{count} non-null {dtype}" dtypes = self.dtypes for i, col in enumerate(self.columns): dtype = dtypes.iloc[i] col = pprint_thing(col) count = "" if show_counts: count = counts.iloc[i] lines.append(_put_str(col, space) + tmpl.format(count=count, dtype=dtype)) def _non_verbose_repr(): lines.append(self.columns._summary(name='Columns')) def _sizeof_fmt(num, size_qualifier): # returns size in human readable format for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: if num < 1024.0: return ("{num:3.1f}{size_q} " "{x}".format(num=num, size_q=size_qualifier, x=x)) num /= 1024.0 return "{num:3.1f}{size_q} {pb}".format(num=num, size_q=size_qualifier, pb='PB') if verbose: _verbose_repr() elif verbose is False: # specifically set to False, not nesc None _non_verbose_repr() else: if exceeds_info_cols: _non_verbose_repr() else: _verbose_repr() counts = self.get_dtype_counts() dtypes = ['{k}({kk:d})'.format(k=k[0], kk=k[1]) for k in sorted(counts.items())] lines.append('dtypes: {types}'.format(types=', '.join(dtypes))) if memory_usage is None: memory_usage = get_option('display.memory_usage') if memory_usage: # append memory usage of df to display size_qualifier = '' if memory_usage == 'deep': deep = True else: # size_qualifier is just a best effort; not guaranteed to catch # all cases (e.g., it misses categorical data even with object # categories) deep = False if ('object' in counts or self.index._is_memory_usage_qualified()): size_qualifier = '+' mem_usage = self.memory_usage(index=True, deep=deep).sum() lines.append("memory usage: {mem}\n".format( mem=_sizeof_fmt(mem_usage, size_qualifier))) fmt.buffer_put_lines(buf, lines) def memory_usage(self, index=True, deep=False): """ Return the memory usage of each column in bytes. The memory usage can optionally include the contribution of the index and elements of `object` dtype. 
This value is displayed in `DataFrame.info` by default. This can be suppressed by setting ``pandas.options.display.memory_usage`` to False. Parameters ---------- index : bool, default True Specifies whether to include the memory usage of the DataFrame's index in returned Series. If ``index=True``, the memory usage of the index is the first item in the output. deep : bool, default False If True, introspect the data deeply by interrogating `object` dtypes for system-level memory consumption, and include it in the returned values. Returns ------- Series A Series whose index is the original column names and whose values is the memory usage of each column in bytes. See Also -------- numpy.ndarray.nbytes : Total bytes consumed by the elements of an ndarray. Series.memory_usage : Bytes consumed by a Series. Categorical : Memory-efficient array for string values with many repeated values. DataFrame.info : Concise summary of a DataFrame. Examples -------- >>> dtypes = ['int64', 'float64', 'complex128', 'object', 'bool'] >>> data = dict([(t, np.ones(shape=5000).astype(t)) ... for t in dtypes]) >>> df = pd.DataFrame(data) >>> df.head() int64 float64 complex128 object bool 0 1 1.0 1.0+0.0j 1 True 1 1 1.0 1.0+0.0j 1 True 2 1 1.0 1.0+0.0j 1 True 3 1 1.0 1.0+0.0j 1 True 4 1 1.0 1.0+0.0j 1 True >>> df.memory_usage() Index 128 int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 >>> df.memory_usage(index=False) int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 The memory footprint of `object` dtype columns is ignored by default: >>> df.memory_usage(deep=True) Index 128 int64 40000 float64 40000 complex128 80000 object 160000 bool 5000 dtype: int64 Use a Categorical for efficient storage of an object-dtype column with many repeated values. >>> df['object'].astype('category').memory_usage(deep=True) 5216 """ result = Series([c.memory_usage(index=False, deep=deep) for col, c in self.iteritems()], index=self.columns) if index: result = Series(self.index.memory_usage(deep=deep), index=['Index']).append(result) return result def transpose(self, *args, **kwargs): """ Transpose index and columns. Reflect the DataFrame over its main diagonal by writing rows as columns and vice-versa. The property :attr:`.T` is an accessor to the method :meth:`transpose`. Parameters ---------- copy : bool, default False If True, the underlying data is copied. Otherwise (default), no copy is made if possible. *args, **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. Returns ------- DataFrame The transposed DataFrame. See Also -------- numpy.transpose : Permute the dimensions of a given array. Notes ----- Transposing a DataFrame with mixed dtypes will result in a homogeneous DataFrame with the `object` dtype. In such a case, a copy of the data is always made. Examples -------- **Square DataFrame with homogeneous dtype** >>> d1 = {'col1': [1, 2], 'col2': [3, 4]} >>> df1 = pd.DataFrame(data=d1) >>> df1 col1 col2 0 1 3 1 2 4 >>> df1_transposed = df1.T # or df1.transpose() >>> df1_transposed 0 1 col1 1 2 col2 3 4 When the dtype is homogeneous in the original DataFrame, we get a transposed DataFrame with the same dtype: >>> df1.dtypes col1 int64 col2 int64 dtype: object >>> df1_transposed.dtypes 0 int64 1 int64 dtype: object **Non-square DataFrame with mixed dtypes** >>> d2 = {'name': ['Alice', 'Bob'], ... 'score': [9.5, 8], ... 'employed': [False, True], ... 
'kids': [0, 0]} >>> df2 = pd.DataFrame(data=d2) >>> df2 name score employed kids 0 Alice 9.5 False 0 1 Bob 8.0 True 0 >>> df2_transposed = df2.T # or df2.transpose() >>> df2_transposed 0 1 name Alice Bob score 9.5 8 employed False True kids 0 0 When the DataFrame has mixed dtypes, we get a transposed DataFrame with the `object` dtype: >>> df2.dtypes name object score float64 employed bool kids int64 dtype: object >>> df2_transposed.dtypes 0 object 1 object dtype: object """ nv.validate_transpose(args, dict()) return super().transpose(1, 0, **kwargs) T = property(transpose) # ---------------------------------------------------------------------- # Picklability # legacy pickle formats def _unpickle_frame_compat(self, state): # pragma: no cover if len(state) == 2: # pragma: no cover series, idx = state columns = sorted(series) else: series, cols, idx = state columns = com._unpickle_array(cols) index = com._unpickle_array(idx) self._data = self._init_dict(series, index, columns, None) def _unpickle_matrix_compat(self, state): # pragma: no cover # old unpickling (vals, idx, cols), object_state = state index = com._unpickle_array(idx) dm = DataFrame(vals, index=index, columns=com._unpickle_array(cols), copy=False) if object_state is not None: ovals, _, ocols = object_state objects = DataFrame(ovals, index=index, columns=com._unpickle_array(ocols), copy=False) dm = dm.join(objects) self._data = dm._data # ---------------------------------------------------------------------- # Getting and setting elements def get_value(self, index, col, takeable=False): """ Quickly retrieve single value at passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label takeable : interpret the index/col as indexers, default False Returns ------- scalar """ warnings.warn("get_value is deprecated and will be removed " "in a future release. Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._get_value(index, col, takeable=takeable) def _get_value(self, index, col, takeable=False): if takeable: series = self._iget_item_cache(col) return com.maybe_box_datetimelike(series._values[index]) series = self._get_item_cache(col) engine = self.index._engine try: return engine.get_value(series._values, index) except KeyError: # GH 20629 if self.index.nlevels > 1: # partial indexing forbidden raise except (TypeError, ValueError): pass # we cannot handle direct indexing # use positional col = self.columns.get_loc(col) index = self.index.get_loc(index) return self._get_value(index, col, takeable=True) _get_value.__doc__ = get_value.__doc__ def set_value(self, index, col, value, takeable=False): """ Put single value at passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label value : scalar takeable : interpret the index/col as indexers, default False Returns ------- DataFrame If label pair is contained, will be reference to calling DataFrame, otherwise a new object. """ warnings.warn("set_value is deprecated and will be removed " "in a future release. 
Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._set_value(index, col, value, takeable=takeable) def _set_value(self, index, col, value, takeable=False): try: if takeable is True: series = self._iget_item_cache(col) return series._set_value(index, value, takeable=True) series = self._get_item_cache(col) engine = self.index._engine engine.set_value(series._values, index, value) return self except (KeyError, TypeError): # set using a non-recursive method & reset the cache if takeable: self.iloc[index, col] = value else: self.loc[index, col] = value self._item_cache.pop(col, None) return self _set_value.__doc__ = set_value.__doc__ def _ixs(self, i, axis=0): """ Parameters ---------- i : int, slice, or sequence of integers axis : int Notes ----- If slice passed, the resulting data will be a view. """ # irow if axis == 0: if isinstance(i, slice): return self[i] else: label = self.index[i] if isinstance(label, Index): # a location index by definition result = self.take(i, axis=axis) copy = True else: new_values = self._data.fast_xs(i) if is_scalar(new_values): return new_values # if we are a copy, mark as such copy = (isinstance(new_values, np.ndarray) and new_values.base is None) result = self._constructor_sliced(new_values, index=self.columns, name=self.index[i], dtype=new_values.dtype) result._set_is_copy(self, copy=copy) return result # icol else: label = self.columns[i] if isinstance(i, slice): # need to return view lab_slice = slice(label[0], label[-1]) return self.loc[:, lab_slice] else: if isinstance(label, Index): return self._take(i, axis=1) index_len = len(self.index) # if the values returned are not the same length # as the index (iow a not found value), iget returns # a 0-len ndarray. This is effectively catching # a numpy error (as numpy should really raise) values = self._data.iget(i) if index_len and not len(values): values = np.array([np.nan] * index_len, dtype=object) result = self._box_col_values(values, label) # this is a cached value, mark it so result._set_as_cached(label, self) return result def __getitem__(self, key): key = lib.item_from_zerodim(key) key = com.apply_if_callable(key, self) # shortcut if the key is in columns try: if self.columns.is_unique and key in self.columns: if self.columns.nlevels > 1: return self._getitem_multilevel(key) return self._get_item_cache(key) except (TypeError, ValueError): # The TypeError correctly catches non hashable "key" (e.g. list) # The ValueError can be removed once GH #21729 is fixed pass # Do we have a slicer (on rows)? indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._slice(indexer, axis=0) # Do we have a (boolean) DataFrame? if isinstance(key, DataFrame): return self._getitem_frame(key) # Do we have a (boolean) 1d indexer? 
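        # e.g. df[df['a'] > 0] or df[[True, False, True]] (illustrative
        # keys); a boolean Series key is first aligned to the frame's index
        # before the matching rows are taken.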
if com.is_bool_indexer(key): return self._getitem_bool_array(key) # We are left with two options: a single key, and a collection of keys, # We interpret tuples as collections only for non-MultiIndex is_single_key = isinstance(key, tuple) or not is_list_like(key) if is_single_key: if self.columns.nlevels > 1: return self._getitem_multilevel(key) indexer = self.columns.get_loc(key) if is_integer(indexer): indexer = [indexer] else: if is_iterator(key): key = list(key) indexer = self.loc._convert_to_indexer(key, axis=1, raise_missing=True) # take() does not accept boolean indexers if getattr(indexer, "dtype", None) == bool: indexer = np.where(indexer)[0] data = self._take(indexer, axis=1) if is_single_key: # What does looking for a single key in a non-unique index return? # The behavior is inconsistent. It returns a Series, except when # - the key itself is repeated (test on data.shape, #9519), or # - we have a MultiIndex on columns (test on self.columns, #21309) if data.shape[1] == 1 and not isinstance(self.columns, MultiIndex): data = data[key] return data def _getitem_bool_array(self, key): # also raises Exception if object array with NA values # warning here just in case -- previously __setitem__ was # reindexing but __getitem__ was not; it seems more reasonable to # go with the __setitem__ behavior since that is more consistent # with all other indexing behavior if isinstance(key, Series) and not key.index.equals(self.index): warnings.warn("Boolean Series key will be reindexed to match " "DataFrame index.", UserWarning, stacklevel=3) elif len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d.' % (len(key), len(self.index))) # check_bool_indexer will throw exception if Series key cannot # be reindexed to match DataFrame rows key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] return self._take(indexer, axis=0) def _getitem_multilevel(self, key): loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): new_columns = self.columns[loc] result_columns = maybe_droplevels(new_columns, key) if self._is_mixed_type: result = self.reindex(columns=new_columns) result.columns = result_columns else: new_values = self.values[:, loc] result = self._constructor(new_values, index=self.index, columns=result_columns) result = result.__finalize__(self) # If there is only one column being returned, and its name is # either an empty string, or a tuple with an empty string as its # first element, then treat the empty string as a placeholder # and return the column as if the user had provided that empty # string in the key. If the result is a Series, exclude the # implied empty string from its name. if len(result.columns) == 1: top = result.columns[0] if isinstance(top, tuple): top = top[0] if top == '': result = result[''] if isinstance(result, Series): result = self._constructor_sliced(result, index=self.index, name=key) result._set_is_copy(self) return result else: return self._get_item_cache(key) def _getitem_frame(self, key): if key.values.size and not is_bool_dtype(key.values): raise ValueError('Must pass DataFrame with boolean values only') return self.where(key) def query(self, expr, inplace=False, **kwargs): """ Query the columns of a DataFrame with a boolean expression. Parameters ---------- expr : str The query string to evaluate. You can refer to variables in the environment by prefixing them with an '@' character like ``@a + b``. .. 
versionadded:: 0.25.0 You can refer to column names that contain spaces by surrounding them in backticks. For example, if one of your columns is called ``a a`` and you want to sum it with ``b``, your query should be ```a a` + b``. inplace : bool Whether the query should modify the data in place or return a modified copy. **kwargs See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`DataFrame.query`. .. versionadded:: 0.18.0 Returns ------- DataFrame DataFrame resulting from the provided query expression. See Also -------- eval : Evaluate a string describing operations on DataFrame columns. DataFrame.eval : Evaluate a string describing operations on DataFrame columns. Notes ----- The result of the evaluation of this expression is first passed to :attr:`DataFrame.loc` and if that fails because of a multidimensional key (e.g., a DataFrame) then the result will be passed to :meth:`DataFrame.__getitem__`. This method uses the top-level :func:`eval` function to evaluate the passed query. The :meth:`~pandas.DataFrame.query` method uses a slightly modified Python syntax by default. For example, the ``&`` and ``|`` (bitwise) operators have the precedence of their boolean cousins, :keyword:`and` and :keyword:`or`. This *is* syntactically valid Python, however the semantics are different. You can change the semantics of the expression by passing the keyword argument ``parser='python'``. This enforces the same semantics as evaluation in Python space. Likewise, you can pass ``engine='python'`` to evaluate an expression using Python itself as a backend. This is not recommended as it is inefficient compared to using ``numexpr`` as the engine. The :attr:`DataFrame.index` and :attr:`DataFrame.columns` attributes of the :class:`~pandas.DataFrame` instance are placed in the query namespace by default, which allows you to treat both the index and columns of the frame as a column in the frame. The identifier ``index`` is used for the frame index; you can also use the name of the index to identify it in a query. Please note that Python keywords may not be used as identifiers. For further details and examples see the ``query`` documentation in :ref:`indexing <indexing.query>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), ... 'B': range(10, 0, -2), ... 'C C': range(10, 5, -1)}) >>> df A B C C 0 1 10 10 1 2 8 9 2 3 6 8 3 4 4 7 4 5 2 6 >>> df.query('A > B') A B C C 4 5 2 6 The previous expression is equivalent to >>> df[df.A > df.B] A B C C 4 5 2 6 For columns with spaces in their name, you can use backtick quoting. >>> df.query('B == `C C`') A B C C 0 1 10 10 The previous expression is equivalent to >>> df[df.B == df['C C']] A B C C 0 1 10 10 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(expr, str): msg = "expr must be a string to be evaluated, {0} given" raise ValueError(msg.format(type(expr))) kwargs['level'] = kwargs.pop('level', 0) + 1 kwargs['target'] = None res = self.eval(expr, **kwargs) try: new_data = self.loc[res] except ValueError: # when res is multi-dimensional loc raises, but this is sometimes a # valid query new_data = self[res] if inplace: self._update_inplace(new_data) else: return new_data def eval(self, expr, inplace=False, **kwargs): """ Evaluate a string describing operations on DataFrame columns. Operates on columns only, not specific rows or elements. This allows `eval` to run arbitrary code, which can make you vulnerable to code injection if you pass user input to this function. 
Parameters ---------- expr : str The expression string to evaluate. inplace : bool, default False If the expression contains an assignment, whether to perform the operation inplace and mutate the existing DataFrame. Otherwise, a new DataFrame is returned. .. versionadded:: 0.18.0. kwargs : dict See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`~pandas.DataFrame.query`. Returns ------- ndarray, scalar, or pandas object The result of the evaluation. See Also -------- DataFrame.query : Evaluates a boolean expression to query the columns of a frame. DataFrame.assign : Can evaluate an expression or function to create new values for a column. eval : Evaluate a Python expression as a string using various backends. Notes ----- For more details see the API documentation for :func:`~eval`. For detailed examples see :ref:`enhancing performance with eval <enhancingperf.eval>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), 'B': range(10, 0, -2)}) >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 >>> df.eval('A + B') 0 11 1 10 2 9 3 8 4 7 dtype: int64 Assignment is allowed though by default the original DataFrame is not modified. >>> df.eval('C = A + B') A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 Use ``inplace=True`` to modify the original DataFrame. >>> df.eval('C = A + B', inplace=True) >>> df A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 """ from pandas.core.computation.eval import eval as _eval inplace = validate_bool_kwarg(inplace, 'inplace') resolvers = kwargs.pop('resolvers', None) kwargs['level'] = kwargs.pop('level', 0) + 1 if resolvers is None: index_resolvers = self._get_index_resolvers() column_resolvers = \ self._get_space_character_free_column_resolvers() resolvers = column_resolvers, index_resolvers if 'target' not in kwargs: kwargs['target'] = self kwargs['resolvers'] = kwargs.get('resolvers', ()) + tuple(resolvers) return _eval(expr, inplace=inplace, **kwargs) def select_dtypes(self, include=None, exclude=None): """ Return a subset of the DataFrame's columns based on the column dtypes. Parameters ---------- include, exclude : scalar or list-like A selection of dtypes or strings to be included/excluded. At least one of these parameters must be supplied. Returns ------- DataFrame The subset of the frame including the dtypes in ``include`` and excluding the dtypes in ``exclude``. Raises ------ ValueError * If both of ``include`` and ``exclude`` are empty * If ``include`` and ``exclude`` have overlapping elements * If any kind of string dtype is passed in. Notes ----- * To select all *numeric* types, use ``np.number`` or ``'number'`` * To select strings you must use the ``object`` dtype, but note that this will return *all* object dtype columns * See the `numpy dtype hierarchy <http://docs.scipy.org/doc/numpy/reference/arrays.scalars.html>`__ * To select datetimes, use ``np.datetime64``, ``'datetime'`` or ``'datetime64'`` * To select timedeltas, use ``np.timedelta64``, ``'timedelta'`` or ``'timedelta64'`` * To select Pandas categorical dtypes, use ``'category'`` * To select Pandas datetimetz dtypes, use ``'datetimetz'`` (new in 0.20.0) or ``'datetime64[ns, tz]'`` Examples -------- >>> df = pd.DataFrame({'a': [1, 2] * 3, ... 'b': [True, False] * 3, ... 
'c': [1.0, 2.0] * 3}) >>> df a b c 0 1 True 1.0 1 2 False 2.0 2 1 True 1.0 3 2 False 2.0 4 1 True 1.0 5 2 False 2.0 >>> df.select_dtypes(include='bool') b 0 True 1 False 2 True 3 False 4 True 5 False >>> df.select_dtypes(include=['float64']) c 0 1.0 1 2.0 2 1.0 3 2.0 4 1.0 5 2.0 >>> df.select_dtypes(exclude=['int']) b c 0 True 1.0 1 False 2.0 2 True 1.0 3 False 2.0 4 True 1.0 5 False 2.0 """ def _get_info_slice(obj, indexer): """Slice the info axis of `obj` with `indexer`.""" if not hasattr(obj, '_info_axis_number'): msg = 'object of type {typ!r} has no info axis' raise TypeError(msg.format(typ=type(obj).__name__)) slices = [slice(None)] * obj.ndim slices[obj._info_axis_number] = indexer return tuple(slices) if not is_list_like(include): include = (include,) if include is not None else () if not is_list_like(exclude): exclude = (exclude,) if exclude is not None else () selection = tuple(map(frozenset, (include, exclude))) if not any(selection): raise ValueError('at least one of include or exclude must be ' 'nonempty') # convert the myriad valid dtypes object to a single representation include, exclude = map( lambda x: frozenset(map(infer_dtype_from_object, x)), selection) for dtypes in (include, exclude): invalidate_string_dtypes(dtypes) # can't both include AND exclude! if not include.isdisjoint(exclude): raise ValueError('include and exclude overlap on {inc_ex}'.format( inc_ex=(include & exclude))) # empty include/exclude -> defaults to True # three cases (we've already raised if both are empty) # case 1: empty include, nonempty exclude # we have True, True, ... True for include, same for exclude # in the loop below we get the excluded # and when we call '&' below we get only the excluded # case 2: nonempty include, empty exclude # same as case 1, but with include # case 3: both nonempty # the "union" of the logic of case 1 and case 2: # we get the included and excluded, and return their logical and include_these = Series(not bool(include), index=self.columns) exclude_these = Series(not bool(exclude), index=self.columns) def is_dtype_instance_mapper(idx, dtype): return idx, functools.partial(issubclass, dtype.type) for idx, f in itertools.starmap(is_dtype_instance_mapper, enumerate(self.dtypes)): if include: # checks for the case of empty include or exclude include_these.iloc[idx] = any(map(f, include)) if exclude: exclude_these.iloc[idx] = not any(map(f, exclude)) dtype_indexer = include_these & exclude_these return self.loc[_get_info_slice(self, dtype_indexer)] def _box_item_values(self, key, values): items = self.columns[self.columns.get_loc(key)] if values.ndim == 2: return self._constructor(values.T, columns=items, index=self.index) else: return self._box_col_values(values, items) def _box_col_values(self, values, items): """ Provide boxed values for a column. 
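
        The raw `values` are wrapped in the sliced constructor (normally a
        Series), reusing the frame's index and taking `items` as the name.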
""" klass = self._constructor_sliced return klass(values, index=self.index, name=items, fastpath=True) def __setitem__(self, key, value): key = com.apply_if_callable(key, self) # see if we can slice the rows indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._setitem_slice(indexer, value) if isinstance(key, DataFrame) or getattr(key, 'ndim', None) == 2: self._setitem_frame(key, value) elif isinstance(key, (Series, np.ndarray, list, Index)): self._setitem_array(key, value) else: # set column self._set_item(key, value) def _setitem_slice(self, key, value): self._check_setitem_copy() self.loc._setitem_with_indexer(key, value) def _setitem_array(self, key, value): # also raises Exception if object array with NA values if com.is_bool_indexer(key): if len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d!' % (len(key), len(self.index))) key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] self._check_setitem_copy() self.loc._setitem_with_indexer(indexer, value) else: if isinstance(value, DataFrame): if len(value.columns) != len(key): raise ValueError('Columns must be same length as key') for k1, k2 in zip(key, value.columns): self[k1] = value[k2] else: indexer = self.loc._convert_to_indexer(key, axis=1) self._check_setitem_copy() self.loc._setitem_with_indexer((slice(None), indexer), value) def _setitem_frame(self, key, value): # support boolean setting with DataFrame input, e.g. # df[df > df2] = 0 if isinstance(key, np.ndarray): if key.shape != self.shape: raise ValueError( 'Array conditional must be same shape as self' ) key = self._constructor(key, **self._construct_axes_dict()) if key.values.size and not is_bool_dtype(key.values): raise TypeError( 'Must pass DataFrame or 2-d ndarray with boolean values only' ) self._check_inplace_setting(value) self._check_setitem_copy() self._where(-key, value, inplace=True) def _ensure_valid_index(self, value): """ Ensure that if we don't have an index, that we can create one from the passed value. """ # GH5632, make sure that we are a Series convertible if not len(self.index) and is_list_like(value): try: value = Series(value) except (ValueError, NotImplementedError, TypeError): raise ValueError('Cannot set a frame with no defined index ' 'and a value that cannot be converted to a ' 'Series') self._data = self._data.reindex_axis(value.index.copy(), axis=1, fill_value=np.nan) def _set_item(self, key, value): """ Add series to DataFrame in specified column. If series is a numpy-array (not a Series/TimeSeries), it must be the same length as the DataFrames index or an error will be thrown. Series/TimeSeries will be conformed to the DataFrames index to ensure homogeneity. """ self._ensure_valid_index(value) value = self._sanitize_column(key, value) NDFrame._set_item(self, key, value) # check if we are modifying a copy # try to set first as we want an invalid # value exception to occur first if len(self): self._check_setitem_copy() def insert(self, loc, column, value, allow_duplicates=False): """ Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int Insertion index. 
Must verify 0 <= loc <= len(columns) column : string, number, or hashable object label of the inserted column value : int, Series, or array-like allow_duplicates : bool, optional """ self._ensure_valid_index(value) value = self._sanitize_column(column, value, broadcast=False) self._data.insert(loc, column, value, allow_duplicates=allow_duplicates) def assign(self, **kwargs): r""" Assign new columns to a DataFrame. Returns a new object with all original columns in addition to new ones. Existing columns that are re-assigned will be overwritten. Parameters ---------- **kwargs : dict of {str: callable or Series} The column names are keywords. If the values are callable, they are computed on the DataFrame and assigned to the new columns. The callable must not change input DataFrame (though pandas doesn't check it). If the values are not callable, (e.g. a Series, scalar, or array), they are simply assigned. Returns ------- DataFrame A new DataFrame with the new columns in addition to all the existing columns. Notes ----- Assigning multiple columns within the same ``assign`` is possible. For Python 3.6 and above, later items in '\*\*kwargs' may refer to newly created or modified columns in 'df'; items are computed and assigned into 'df' in order. For Python 3.5 and below, the order of keyword arguments is not specified, you cannot refer to newly created or modified columns. All items are computed first, and then assigned in alphabetical order. .. versionchanged :: 0.23.0 Keyword argument order is maintained for Python 3.6 and later. Examples -------- >>> df = pd.DataFrame({'temp_c': [17.0, 25.0]}, ... index=['Portland', 'Berkeley']) >>> df temp_c Portland 17.0 Berkeley 25.0 Where the value is a callable, evaluated on `df`: >>> df.assign(temp_f=lambda x: x.temp_c * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 Alternatively, the same behavior can be achieved by directly referencing an existing Series or sequence: >>> df.assign(temp_f=df['temp_c'] * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 In Python 3.6+, you can create multiple columns within the same assign where one of the columns depends on another one defined within the same assign: >>> df.assign(temp_f=lambda x: x['temp_c'] * 9 / 5 + 32, ... temp_k=lambda x: (x['temp_f'] + 459.67) * 5 / 9) temp_c temp_f temp_k Portland 17.0 62.6 290.15 Berkeley 25.0 77.0 298.15 """ data = self.copy() # >= 3.6 preserve order of kwargs if PY36: for k, v in kwargs.items(): data[k] = com.apply_if_callable(v, data) else: # <= 3.5: do all calculations first... results = OrderedDict() for k, v in kwargs.items(): results[k] = com.apply_if_callable(v, data) # <= 3.5 and earlier results = sorted(results.items()) # ... and then assign for k, v in results: data[k] = v return data def _sanitize_column(self, key, value, broadcast=True): """ Ensures new columns (which go into the BlockManager as new blocks) are always copied and converted into an array. Parameters ---------- key : object value : scalar, Series, or array-like broadcast : bool, default True If ``key`` matches multiple duplicate column names in the DataFrame, this parameter indicates whether ``value`` should be tiled so that the returned array contains a (duplicated) column for each occurrence of the key. If False, ``value`` will not be tiled. 
Returns ------- numpy.ndarray """ def reindexer(value): # reindex if necessary if value.index.equals(self.index) or not len(self.index): value = value._values.copy() else: # GH 4107 try: value = value.reindex(self.index)._values except Exception as e: # duplicate axis if not value.index.is_unique: raise e # other raise TypeError('incompatible index of inserted column ' 'with frame index') return value if isinstance(value, Series): value = reindexer(value) elif isinstance(value, DataFrame): # align right-hand-side columns if self.columns # is multi-index and self[key] is a sub-frame if isinstance(self.columns, MultiIndex) and key in self.columns: loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): cols = maybe_droplevels(self.columns[loc], key) if len(cols) and not cols.equals(value.columns): value = value.reindex(cols, axis=1) # now align rows value = reindexer(value).T elif isinstance(value, ExtensionArray): # Explicitly copy here, instead of in sanitize_index, # as sanitize_index won't copy an EA, even with copy=True value = value.copy() value = sanitize_index(value, self.index, copy=False) elif isinstance(value, Index) or is_sequence(value): # turn me into an ndarray value = sanitize_index(value, self.index, copy=False) if not isinstance(value, (np.ndarray, Index)): if isinstance(value, list) and len(value) > 0: value = maybe_convert_platform(value) else: value = com.asarray_tuplesafe(value) elif value.ndim == 2: value = value.copy().T elif isinstance(value, Index): value = value.copy(deep=True) else: value = value.copy() # possibly infer to datetimelike if is_object_dtype(value.dtype): value = maybe_infer_to_datetimelike(value) else: # cast ignores pandas dtypes. so save the dtype first infer_dtype, _ = infer_dtype_from_scalar( value, pandas_dtype=True) # upcast value = cast_scalar_to_array(len(self.index), value) value = maybe_cast_to_datetime(value, infer_dtype) # return internal types directly if is_extension_type(value) or is_extension_array_dtype(value): return value # broadcast across multiple columns if necessary if broadcast and key in self.columns and value.ndim == 1: if (not self.columns.is_unique or isinstance(self.columns, MultiIndex)): existing_piece = self[key] if isinstance(existing_piece, DataFrame): value = np.tile(value, (len(existing_piece.columns), 1)) return np.atleast_2d(np.asarray(value)) @property def _series(self): return {item: Series(self._data.iget(idx), index=self.index, name=item) for idx, item in enumerate(self.columns)} def lookup(self, row_labels, col_labels): """ Label-based "fancy indexing" function for DataFrame. Given equal-length arrays of row and column labels, return an array of the values corresponding to each (row, col) pair. 

        Parameters
        ----------
        row_labels : sequence
            The row labels to use for lookup.
        col_labels : sequence
            The column labels to use for lookup.

        Returns
        -------
        numpy.ndarray
            The found values.

        Notes
        -----
        Akin to::

            result = [df.get_value(row, col)
                      for row, col in zip(row_labels, col_labels)]
        """
        n = len(row_labels)
        if n != len(col_labels):
            raise ValueError('Row labels must have same size as '
                             'column labels')

        thresh = 1000
        if not self._is_mixed_type or n > thresh:
            values = self.values
            ridx = self.index.get_indexer(row_labels)
            cidx = self.columns.get_indexer(col_labels)
            if (ridx == -1).any():
                raise KeyError('One or more row labels was not found')
            if (cidx == -1).any():
                raise KeyError('One or more column labels was not found')
            flat_index = ridx * len(self.columns) + cidx
            result = values.flat[flat_index]
        else:
            result = np.empty(n, dtype='O')
            for i, (r, c) in enumerate(zip(row_labels, col_labels)):
                result[i] = self._get_value(r, c)

        if is_object_dtype(result):
            result = lib.maybe_convert_objects(result)

        return result

    # ----------------------------------------------------------------------
    # Reindexing and alignment

    def _reindex_axes(self, axes, level, limit, tolerance, method, fill_value,
                      copy):
        frame = self

        columns = axes['columns']
        if columns is not None:
            frame = frame._reindex_columns(columns, method, copy, level,
                                           fill_value, limit, tolerance)

        index = axes['index']
        if index is not None:
            frame = frame._reindex_index(index, method, copy, level,
                                         fill_value, limit, tolerance)

        return frame

    def _reindex_index(self, new_index, method, copy, level,
                       fill_value=np.nan, limit=None, tolerance=None):
        new_index, indexer = self.index.reindex(new_index, method=method,
                                                level=level, limit=limit,
                                                tolerance=tolerance)
        return self._reindex_with_indexers({0: [new_index, indexer]},
                                           copy=copy, fill_value=fill_value,
                                           allow_dups=False)

    def _reindex_columns(self, new_columns, method, copy, level,
                         fill_value=None, limit=None, tolerance=None):
        new_columns, indexer = self.columns.reindex(new_columns,
                                                    method=method,
                                                    level=level, limit=limit,
                                                    tolerance=tolerance)
        return self._reindex_with_indexers({1: [new_columns, indexer]},
                                           copy=copy, fill_value=fill_value,
                                           allow_dups=False)

    def _reindex_multi(self, axes, copy, fill_value):
        """
        We are guaranteed non-Nones in the axes.
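
        Reindexes the index and columns in one step; when indexers are
        available for both axes, a single 2-D ``take_2d_multi`` is used
        instead of two sequential reindex passes.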
""" new_index, row_indexer = self.index.reindex(axes['index']) new_columns, col_indexer = self.columns.reindex(axes['columns']) if row_indexer is not None and col_indexer is not None: indexer = row_indexer, col_indexer new_values = algorithms.take_2d_multi(self.values, indexer, fill_value=fill_value) return self._constructor(new_values, index=new_index, columns=new_columns) else: return self._reindex_with_indexers({0: [new_index, row_indexer], 1: [new_columns, col_indexer]}, copy=copy, fill_value=fill_value) @Appender(_shared_docs['align'] % _shared_doc_kwargs) def align(self, other, join='outer', axis=None, level=None, copy=True, fill_value=None, method=None, limit=None, fill_axis=0, broadcast_axis=None): return super().align(other, join=join, axis=axis, level=level, copy=copy, fill_value=fill_value, method=method, limit=limit, fill_axis=fill_axis, broadcast_axis=broadcast_axis) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.reindex.__doc__) @rewrite_axis_style_signature('labels', [('method', None), ('copy', True), ('level', None), ('fill_value', np.nan), ('limit', None), ('tolerance', None)]) def reindex(self, *args, **kwargs): axes = validate_axis_style_args(self, args, kwargs, 'labels', 'reindex') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('labels', None) return super().reindex(**kwargs) @Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs) def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True, limit=None, fill_value=np.nan): return super().reindex_axis(labels=labels, axis=axis, method=method, level=level, copy=copy, limit=limit, fill_value=fill_value) def drop(self, labels=None, axis=0, index=None, columns=None, level=None, inplace=False, errors='raise'): """ Drop specified labels from rows or columns. Remove rows or columns by specifying label names and corresponding axis, or by specifying directly index or column names. When using a multi-index, labels on different levels can be removed by specifying the level. Parameters ---------- labels : single label or list-like Index or column labels to drop. axis : {0 or 'index', 1 or 'columns'}, default 0 Whether to drop labels from the index (0 or 'index') or columns (1 or 'columns'). index : single label or list-like Alternative to specifying axis (``labels, axis=0`` is equivalent to ``index=labels``). .. versionadded:: 0.21.0 columns : single label or list-like Alternative to specifying axis (``labels, axis=1`` is equivalent to ``columns=labels``). .. versionadded:: 0.21.0 level : int or level name, optional For MultiIndex, level from which the labels will be removed. inplace : bool, default False If True, do operation inplace and return None. errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and only existing labels are dropped. Returns ------- DataFrame DataFrame without the removed index or column labels. Raises ------ KeyError If any of the labels is not found in the selected axis. See Also -------- DataFrame.loc : Label-location based indexer for selection by label. DataFrame.dropna : Return DataFrame with labels on given axis omitted where (all or any) data are missing. DataFrame.drop_duplicates : Return DataFrame with duplicate rows removed, optionally only considering certain columns. Series.drop : Return Series with specified index labels removed. Examples -------- >>> df = pd.DataFrame(np.arange(12).reshape(3, 4), ... 
columns=['A', 'B', 'C', 'D']) >>> df A B C D 0 0 1 2 3 1 4 5 6 7 2 8 9 10 11 Drop columns >>> df.drop(['B', 'C'], axis=1) A D 0 0 3 1 4 7 2 8 11 >>> df.drop(columns=['B', 'C']) A D 0 0 3 1 4 7 2 8 11 Drop a row by index >>> df.drop([0, 1]) A B C D 2 8 9 10 11 Drop columns and/or rows of MultiIndex DataFrame >>> midx = pd.MultiIndex(levels=[['lama', 'cow', 'falcon'], ... ['speed', 'weight', 'length']], ... codes=[[0, 0, 0, 1, 1, 1, 2, 2, 2], ... [0, 1, 2, 0, 1, 2, 0, 1, 2]]) >>> df = pd.DataFrame(index=midx, columns=['big', 'small'], ... data=[[45, 30], [200, 100], [1.5, 1], [30, 20], ... [250, 150], [1.5, 0.8], [320, 250], ... [1, 0.8], [0.3, 0.2]]) >>> df big small lama speed 45.0 30.0 weight 200.0 100.0 length 1.5 1.0 cow speed 30.0 20.0 weight 250.0 150.0 length 1.5 0.8 falcon speed 320.0 250.0 weight 1.0 0.8 length 0.3 0.2 >>> df.drop(index='cow', columns='small') big lama speed 45.0 weight 200.0 length 1.5 falcon speed 320.0 weight 1.0 length 0.3 >>> df.drop(index='length', level=1) big small lama speed 45.0 30.0 weight 200.0 100.0 cow speed 30.0 20.0 weight 250.0 150.0 falcon speed 320.0 250.0 weight 1.0 0.8 """ return super().drop(labels=labels, axis=axis, index=index, columns=columns, level=level, inplace=inplace, errors=errors) @rewrite_axis_style_signature('mapper', [('copy', True), ('inplace', False), ('level', None), ('errors', 'ignore')]) def rename(self, *args, **kwargs): """ Alter axes labels. Function / dict values must be unique (1-to-1). Labels not contained in a dict / Series will be left as-is. Extra labels listed don't throw an error. See the :ref:`user guide <basics.rename>` for more. Parameters ---------- mapper : dict-like or function Dict-like or functions transformations to apply to that axis' values. Use either ``mapper`` and ``axis`` to specify the axis to target with ``mapper``, or ``index`` and ``columns``. index : dict-like or function Alternative to specifying axis (``mapper, axis=0`` is equivalent to ``index=mapper``). columns : dict-like or function Alternative to specifying axis (``mapper, axis=1`` is equivalent to ``columns=mapper``). axis : int or str Axis to target with ``mapper``. Can be either the axis name ('index', 'columns') or number (0, 1). The default is 'index'. copy : bool, default True Also copy underlying data. inplace : bool, default False Whether to return a new DataFrame. If True then value of copy is ignored. level : int or level name, default None In case of a MultiIndex, only rename labels in the specified level. errors : {'ignore', 'raise'}, default 'ignore' If 'raise', raise a `KeyError` when a dict-like `mapper`, `index`, or `columns` contains labels that are not present in the Index being transformed. If 'ignore', existing keys will be renamed and extra keys will be ignored. Returns ------- DataFrame DataFrame with the renamed axis labels. Raises ------ KeyError If any of the labels is not found in the selected axis and "errors='raise'". See Also -------- DataFrame.rename_axis : Set the name of the axis. Examples -------- ``DataFrame.rename`` supports two calling conventions * ``(index=index_mapper, columns=columns_mapper, ...)`` * ``(mapper, axis={'index', 'columns'}, ...)`` We *highly* recommend using keyword arguments to clarify your intent. 
Rename columns using a mapping: >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]}) >>> df.rename(columns={"A": "a", "B": "c"}) a c 0 1 4 1 2 5 2 3 6 Rename index using a mapping: >>> df.rename(index={0: "x", 1: "y", 2: "z"}) A B x 1 4 y 2 5 z 3 6 Cast index labels to a different type: >>> df.index RangeIndex(start=0, stop=3, step=1) >>> df.rename(index=str).index Index(['0', '1', '2'], dtype='object') >>> df.rename(columns={"A": "a", "B": "b", "C": "c"}, errors="raise") Traceback (most recent call last): KeyError: ['C'] not found in axis Using axis-style parameters >>> df.rename(str.lower, axis='columns') a b 0 1 4 1 2 5 2 3 6 >>> df.rename({1: 2, 2: 4}, axis='index') A B 0 1 4 2 2 5 4 3 6 """ axes = validate_axis_style_args(self, args, kwargs, 'mapper', 'rename') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('mapper', None) return super().rename(**kwargs) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.fillna.__doc__) def fillna(self, value=None, method=None, axis=None, inplace=False, limit=None, downcast=None, **kwargs): return super().fillna(value=value, method=method, axis=axis, inplace=inplace, limit=limit, downcast=downcast, **kwargs) @Appender(_shared_docs['replace'] % _shared_doc_kwargs) def replace(self, to_replace=None, value=None, inplace=False, limit=None, regex=False, method='pad'): return super().replace(to_replace=to_replace, value=value, inplace=inplace, limit=limit, regex=regex, method=method) @Appender(_shared_docs['shift'] % _shared_doc_kwargs) def shift(self, periods=1, freq=None, axis=0, fill_value=None): return super().shift(periods=periods, freq=freq, axis=axis, fill_value=fill_value) def set_index(self, keys, drop=True, append=False, inplace=False, verify_integrity=False): """ Set the DataFrame index using existing columns. Set the DataFrame index (row labels) using one or more existing columns or arrays (of the correct length). The index can replace the existing index or expand on it. Parameters ---------- keys : label or array-like or list of labels/arrays This parameter can be either a single column key, a single array of the same length as the calling DataFrame, or a list containing an arbitrary combination of column keys and arrays. Here, "array" encompasses :class:`Series`, :class:`Index`, ``np.ndarray``, and instances of :class:`~collections.abc.Iterator`. drop : bool, default True Delete columns to be used as the new index. append : bool, default False Whether to append columns to existing index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). verify_integrity : bool, default False Check the new index for duplicates. Otherwise defer the check until necessary. Setting to False will improve the performance of this method. Returns ------- DataFrame Changed row labels. See Also -------- DataFrame.reset_index : Opposite of set_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame({'month': [1, 4, 7, 10], ... 'year': [2012, 2014, 2013, 2014], ... 
'sale': [55, 40, 84, 31]}) >>> df month year sale 0 1 2012 55 1 4 2014 40 2 7 2013 84 3 10 2014 31 Set the index to become the 'month' column: >>> df.set_index('month') year sale month 1 2012 55 4 2014 40 7 2013 84 10 2014 31 Create a MultiIndex using columns 'year' and 'month': >>> df.set_index(['year', 'month']) sale year month 2012 1 55 2014 4 40 2013 7 84 2014 10 31 Create a MultiIndex using an Index and a column: >>> df.set_index([pd.Index([1, 2, 3, 4]), 'year']) month sale year 1 2012 1 55 2 2014 4 40 3 2013 7 84 4 2014 10 31 Create a MultiIndex using two Series: >>> s = pd.Series([1, 2, 3, 4]) >>> df.set_index([s, s**2]) month year sale 1 1 1 2012 55 2 4 4 2014 40 3 9 7 2013 84 4 16 10 2014 31 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(keys, list): keys = [keys] err_msg = ('The parameter "keys" may be a column key, one-dimensional ' 'array, or a list containing only valid column keys and ' 'one-dimensional arrays.') missing = [] for col in keys: if isinstance(col, (ABCIndexClass, ABCSeries, np.ndarray, list, abc.Iterator)): # arrays are fine as long as they are one-dimensional # iterators get converted to list below if getattr(col, 'ndim', 1) != 1: raise ValueError(err_msg) else: # everything else gets tried as a key; see GH 24969 try: found = col in self.columns except TypeError: raise TypeError(err_msg + ' Received column of ' 'type {}'.format(type(col))) else: if not found: missing.append(col) if missing: raise KeyError('None of {} are in the columns'.format(missing)) if inplace: frame = self else: frame = self.copy() arrays = [] names = [] if append: names = [x for x in self.index.names] if isinstance(self.index, ABCMultiIndex): for i in range(self.index.nlevels): arrays.append(self.index._get_level_values(i)) else: arrays.append(self.index) to_remove = [] for col in keys: if isinstance(col, ABCMultiIndex): for n in range(col.nlevels): arrays.append(col._get_level_values(n)) names.extend(col.names) elif isinstance(col, (ABCIndexClass, ABCSeries)): # if Index then not MultiIndex (treated above) arrays.append(col) names.append(col.name) elif isinstance(col, (list, np.ndarray)): arrays.append(col) names.append(None) elif isinstance(col, abc.Iterator): arrays.append(list(col)) names.append(None) # from here, col can only be a column label else: arrays.append(frame[col]._values) names.append(col) if drop: to_remove.append(col) if len(arrays[-1]) != len(self): # check newest element against length of calling frame, since # ensure_index_from_sequences would not raise for append=False. raise ValueError('Length mismatch: Expected {len_self} rows, ' 'received array of length {len_col}'.format( len_self=len(self), len_col=len(arrays[-1]) )) index = ensure_index_from_sequences(arrays, names) if verify_integrity and not index.is_unique: duplicates = index[index.duplicated()].unique() raise ValueError('Index has duplicate keys: {dup}'.format( dup=duplicates)) # use set to handle duplicate column names gracefully in case of drop for c in set(to_remove): del frame[c] # clear up memory usage index._cleanup() frame.index = index if not inplace: return frame def reset_index(self, level=None, drop=False, inplace=False, col_level=0, col_fill=''): """ Reset the index, or a level of it. Reset the index of the DataFrame, and use the default one instead. If the DataFrame has a MultiIndex, this method can remove one or more levels. Parameters ---------- level : int, str, tuple, or list, default None Only remove the given levels from the index. Removes all levels by default. 
drop : bool, default False Do not try to insert index into dataframe columns. This resets the index to the default integer index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). col_level : int or str, default 0 If the columns have multiple levels, determines which level the labels are inserted into. By default it is inserted into the first level. col_fill : object, default '' If the columns have multiple levels, determines how the other levels are named. If None then the index name is repeated. Returns ------- DataFrame DataFrame with the new index. See Also -------- DataFrame.set_index : Opposite of reset_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame([('bird', 389.0), ... ('bird', 24.0), ... ('mammal', 80.5), ... ('mammal', np.nan)], ... index=['falcon', 'parrot', 'lion', 'monkey'], ... columns=('class', 'max_speed')) >>> df class max_speed falcon bird 389.0 parrot bird 24.0 lion mammal 80.5 monkey mammal NaN When we reset the index, the old index is added as a column, and a new sequential index is used: >>> df.reset_index() index class max_speed 0 falcon bird 389.0 1 parrot bird 24.0 2 lion mammal 80.5 3 monkey mammal NaN We can use the `drop` parameter to avoid the old index being added as a column: >>> df.reset_index(drop=True) class max_speed 0 bird 389.0 1 bird 24.0 2 mammal 80.5 3 mammal NaN You can also use `reset_index` with `MultiIndex`. >>> index = pd.MultiIndex.from_tuples([('bird', 'falcon'), ... ('bird', 'parrot'), ... ('mammal', 'lion'), ... ('mammal', 'monkey')], ... names=['class', 'name']) >>> columns = pd.MultiIndex.from_tuples([('speed', 'max'), ... ('species', 'type')]) >>> df = pd.DataFrame([(389.0, 'fly'), ... ( 24.0, 'fly'), ... ( 80.5, 'run'), ... (np.nan, 'jump')], ... index=index, ... columns=columns) >>> df speed species max type class name bird falcon 389.0 fly parrot 24.0 fly mammal lion 80.5 run monkey NaN jump If the index has multiple levels, we can reset a subset of them: >>> df.reset_index(level='class') class speed species max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we are not dropping the index, by default, it is placed in the top level. 
We can place it in another level: >>> df.reset_index(level='class', col_level=1) speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump When the index is inserted under another level, we can specify under which one with the parameter `col_fill`: >>> df.reset_index(level='class', col_level=1, col_fill='species') species speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we specify a nonexistent level for `col_fill`, it is created: >>> df.reset_index(level='class', col_level=1, col_fill='genus') genus speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump """ inplace = validate_bool_kwarg(inplace, 'inplace') if inplace: new_obj = self else: new_obj = self.copy() def _maybe_casted_values(index, labels=None): values = index._values if not isinstance(index, (PeriodIndex, DatetimeIndex)): if values.dtype == np.object_: values = lib.maybe_convert_objects(values) # if we have the labels, extract the values with a mask if labels is not None: mask = labels == -1 # we can have situations where the whole mask is -1, # meaning there is nothing found in labels, so make all nan's if mask.all(): values = np.empty(len(mask)) values.fill(np.nan) else: values = values.take(labels) # TODO(https://github.com/pandas-dev/pandas/issues/24206) # Push this into maybe_upcast_putmask? # We can't pass EAs there right now. Looks a bit # complicated. # So we unbox the ndarray_values, op, re-box. values_type = type(values) values_dtype = values.dtype if issubclass(values_type, DatetimeLikeArray): values = values._data if mask.any(): values, changed = maybe_upcast_putmask( values, mask, np.nan) if issubclass(values_type, DatetimeLikeArray): values = values_type(values, dtype=values_dtype) return values new_index = ibase.default_index(len(new_obj)) if level is not None: if not isinstance(level, (tuple, list)): level = [level] level = [self.index._get_level_number(lev) for lev in level] if len(level) < self.index.nlevels: new_index = self.index.droplevel(level) if not drop: if isinstance(self.index, MultiIndex): names = [n if n is not None else ('level_%d' % i) for (i, n) in enumerate(self.index.names)] to_insert = zip(self.index.levels, self.index.codes) else: default = 'index' if 'index' not in self else 'level_0' names = ([default] if self.index.name is None else [self.index.name]) to_insert = ((self.index, None),) multi_col = isinstance(self.columns, MultiIndex) for i, (lev, lab) in reversed(list(enumerate(to_insert))): if not (level is None or i in level): continue name = names[i] if multi_col: col_name = (list(name) if isinstance(name, tuple) else [name]) if col_fill is None: if len(col_name) not in (1, self.columns.nlevels): raise ValueError("col_fill=None is incompatible " "with incomplete column name " "{}".format(name)) col_fill = col_name[0] lev_num = self.columns._get_level_number(col_level) name_lst = [col_fill] * lev_num + col_name missing = self.columns.nlevels - len(name_lst) name_lst += [col_fill] * missing name = tuple(name_lst) # to ndarray and maybe infer different dtype level_values = _maybe_casted_values(lev, lab) new_obj.insert(0, name, level_values) new_obj.index = new_index if not inplace: return new_obj # ---------------------------------------------------------------------- # Reindex-based selection methods @Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isna(self): return super().isna() 
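
    # ``isnull`` and ``notnull`` below are aliases of ``isna`` and
    # ``notna``, retained for backwards compatibility; they share the
    # same shared-docstring templates.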
@Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isnull(self): return super().isnull() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notna(self): return super().notna() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notnull(self): return super().notnull() def dropna(self, axis=0, how='any', thresh=None, subset=None, inplace=False): """ Remove missing values. See the :ref:`User Guide <missing_data>` for more on which values are considered missing, and how to work with missing data. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 Determine if rows or columns which contain missing values are removed. * 0, or 'index' : Drop rows which contain missing values. * 1, or 'columns' : Drop columns which contain missing value. .. deprecated:: 0.23.0 Pass tuple or list to drop on multiple axes. Only a single axis is allowed. how : {'any', 'all'}, default 'any' Determine if row or column is removed from DataFrame, when we have at least one NA or all NA. * 'any' : If any NA values are present, drop that row or column. * 'all' : If all values are NA, drop that row or column. thresh : int, optional Require that many non-NA values. subset : array-like, optional Labels along other axis to consider, e.g. if you are dropping rows these would be a list of columns to include. inplace : bool, default False If True, do operation inplace and return None. Returns ------- DataFrame DataFrame with NA entries dropped from it. See Also -------- DataFrame.isna: Indicate missing values. DataFrame.notna : Indicate existing (non-missing) values. DataFrame.fillna : Replace missing values. Series.dropna : Drop missing values. Index.dropna : Drop missing indices. Examples -------- >>> df = pd.DataFrame({"name": ['Alfred', 'Batman', 'Catwoman'], ... "toy": [np.nan, 'Batmobile', 'Bullwhip'], ... "born": [pd.NaT, pd.Timestamp("1940-04-25"), ... pd.NaT]}) >>> df name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Drop the rows where at least one element is missing. >>> df.dropna() name toy born 1 Batman Batmobile 1940-04-25 Drop the columns where at least one element is missing. >>> df.dropna(axis='columns') name 0 Alfred 1 Batman 2 Catwoman Drop the rows where all elements are missing. >>> df.dropna(how='all') name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Keep only the rows with at least 2 non-NA values. >>> df.dropna(thresh=2) name toy born 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Define in which columns to look for missing values. >>> df.dropna(subset=['name', 'born']) name toy born 1 Batman Batmobile 1940-04-25 Keep the DataFrame with valid entries in the same variable. 
>>> df.dropna(inplace=True) >>> df name toy born 1 Batman Batmobile 1940-04-25 """ inplace = validate_bool_kwarg(inplace, 'inplace') if isinstance(axis, (tuple, list)): # GH20987 msg = ("supplying multiple axes to axis is deprecated and " "will be removed in a future version.") warnings.warn(msg, FutureWarning, stacklevel=2) result = self for ax in axis: result = result.dropna(how=how, thresh=thresh, subset=subset, axis=ax) else: axis = self._get_axis_number(axis) agg_axis = 1 - axis agg_obj = self if subset is not None: ax = self._get_axis(agg_axis) indices = ax.get_indexer_for(subset) check = indices == -1 if check.any(): raise KeyError(list(np.compress(check, subset))) agg_obj = self.take(indices, axis=agg_axis) count = agg_obj.count(axis=agg_axis) if thresh is not None: mask = count >= thresh elif how == 'any': mask = count == len(agg_obj._get_axis(agg_axis)) elif how == 'all': mask = count > 0 else: if how is not None: raise ValueError('invalid how option: {h}'.format(h=how)) else: raise TypeError('must specify how or thresh') result = self.loc(axis=axis)[mask] if inplace: self._update_inplace(result) else: return result def drop_duplicates(self, subset=None, keep='first', inplace=False): """ Return DataFrame with duplicate rows removed, optionally only considering certain columns. Indexes, including time indexes are ignored. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Drop duplicates except for the first occurrence. - ``last`` : Drop duplicates except for the last occurrence. - False : Drop all duplicates. inplace : boolean, default False Whether to drop duplicates in place or to return a copy Returns ------- DataFrame """ if self.empty: return self.copy() inplace = validate_bool_kwarg(inplace, 'inplace') duplicated = self.duplicated(subset, keep=keep) if inplace: inds, = (-duplicated)._ndarray_values.nonzero() new_data = self._data.take(inds) self._update_inplace(new_data) else: return self[-duplicated] def duplicated(self, subset=None, keep='first'): """ Return boolean Series denoting duplicate rows, optionally only considering certain columns. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Mark duplicates as ``True`` except for the first occurrence. - ``last`` : Mark duplicates as ``True`` except for the last occurrence. - False : Mark all duplicates as ``True``. Returns ------- Series """ from pandas.core.sorting import get_group_index from pandas._libs.hashtable import duplicated_int64, _SIZE_HINT_LIMIT if self.empty: return Series(dtype=bool) def f(vals): labels, shape = algorithms.factorize( vals, size_hint=min(len(self), _SIZE_HINT_LIMIT)) return labels.astype('i8', copy=False), len(shape) if subset is None: subset = self.columns elif (not np.iterable(subset) or isinstance(subset, str) or isinstance(subset, tuple) and subset in self.columns): subset = subset, # Verify all columns in subset exist in the queried dataframe # Otherwise, raise a KeyError, same as if you try to __getitem__ with a # key that doesn't exist. 
diff = Index(subset).difference(self.columns) if not diff.empty: raise KeyError(diff) vals = (col.values for name, col in self.iteritems() if name in subset) labels, shape = map(list, zip(*map(f, vals))) ids = get_group_index(labels, shape, sort=False, xnull=False) return Series(duplicated_int64(ids, keep), index=self.index) # ---------------------------------------------------------------------- # Sorting @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_values.__doc__) def sort_values(self, by, axis=0, ascending=True, inplace=False, kind='quicksort', na_position='last'): inplace = validate_bool_kwarg(inplace, 'inplace') axis = self._get_axis_number(axis) if not isinstance(by, list): by = [by] if is_sequence(ascending) and len(by) != len(ascending): raise ValueError('Length of ascending (%d) != length of by (%d)' % (len(ascending), len(by))) if len(by) > 1: from pandas.core.sorting import lexsort_indexer keys = [self._get_label_or_level_values(x, axis=axis) for x in by] indexer = lexsort_indexer(keys, orders=ascending, na_position=na_position) indexer = ensure_platform_int(indexer) else: from pandas.core.sorting import nargsort by = by[0] k = self._get_label_or_level_values(by, axis=axis) if isinstance(ascending, (tuple, list)): ascending = ascending[0] indexer = nargsort(k, kind=kind, ascending=ascending, na_position=na_position) new_data = self._data.take(indexer, axis=self._get_block_manager_axis(axis), verify=False) if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_index.__doc__) def sort_index(self, axis=0, level=None, ascending=True, inplace=False, kind='quicksort', na_position='last', sort_remaining=True, by=None): # TODO: this can be combined with Series.sort_index impl as # almost identical inplace = validate_bool_kwarg(inplace, 'inplace') # 10726 if by is not None: warnings.warn("by argument to sort_index is deprecated, " "please use .sort_values(by=...)", FutureWarning, stacklevel=2) if level is not None: raise ValueError("unable to simultaneously sort by and level") return self.sort_values(by, axis=axis, ascending=ascending, inplace=inplace) axis = self._get_axis_number(axis) labels = self._get_axis(axis) # make sure that the axis is lexsorted to start # if not we need to reconstruct to get the correct indexer labels = labels._sort_levels_monotonic() if level is not None: new_axis, indexer = labels.sortlevel(level, ascending=ascending, sort_remaining=sort_remaining) elif isinstance(labels, MultiIndex): from pandas.core.sorting import lexsort_indexer indexer = lexsort_indexer(labels._get_codes_for_sorting(), orders=ascending, na_position=na_position) else: from pandas.core.sorting import nargsort # Check monotonic-ness before sort an index # GH11080 if ((ascending and labels.is_monotonic_increasing) or (not ascending and labels.is_monotonic_decreasing)): if inplace: return else: return self.copy() indexer = nargsort(labels, kind=kind, ascending=ascending, na_position=na_position) baxis = self._get_block_manager_axis(axis) new_data = self._data.take(indexer, axis=baxis, verify=False) # reconstruct axis if needed new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic() if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) def nlargest(self, n, columns, keep='first'): """ Return the first `n` rows ordered by `columns` in descending order. 

        Return the first `n` rows with the largest values in `columns`, in
        descending order. The columns that are not specified are returned as
        well, but not used for ordering.

        This method is equivalent to
        ``df.sort_values(columns, ascending=False).head(n)``, but more
        performant.

        Parameters
        ----------
        n : int
            Number of rows to return.
        columns : label or list of labels
            Column label(s) to order by.
        keep : {'first', 'last', 'all'}, default 'first'
            Where there are duplicate values:

            - ``first`` : prioritize the first occurrence(s)
            - ``last`` : prioritize the last occurrence(s)
            - ``all`` : do not drop any duplicates, even if it means
              selecting more than `n` items.

            .. versionadded:: 0.24.0

        Returns
        -------
        DataFrame
            The first `n` rows ordered by the given columns in descending
            order.

        See Also
        --------
        DataFrame.nsmallest : Return the first `n` rows ordered by `columns`
            in ascending order.
        DataFrame.sort_values : Sort DataFrame by the values.
        DataFrame.head : Return the first `n` rows without re-ordering.

        Notes
        -----
        This function cannot be used with all column types. For example, when
        specifying columns with `object` or `category` dtypes, ``TypeError``
        is raised.

        Examples
        --------
        >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
        ...                                   434000, 434000, 337000, 11300,
        ...                                   11300, 11300],
        ...                    'GDP': [1937894, 2583560, 12011, 4520, 12128,
        ...                            17036, 182, 38, 311],
        ...                    'alpha-2': ["IT", "FR", "MT", "MV", "BN",
        ...                                "IS", "NR", "TV", "AI"]},
        ...                   index=["Italy", "France", "Malta",
        ...                          "Maldives", "Brunei", "Iceland",
        ...                          "Nauru", "Tuvalu", "Anguilla"])
        >>> df
                  population      GDP alpha-2
        Italy       59000000  1937894      IT
        France      65000000  2583560      FR
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN
        Iceland       337000    17036      IS
        Nauru          11300      182      NR
        Tuvalu         11300       38      TV
        Anguilla       11300      311      AI

        In the following example, we will use ``nlargest`` to select the three
        rows having the largest values in column "population".

        >>> df.nlargest(3, 'population')
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Malta       434000    12011      MT

        When using ``keep='last'``, ties are resolved in reverse order:

        >>> df.nlargest(3, 'population', keep='last')
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Brunei      434000    12128      BN

        When using ``keep='all'``, all duplicate items are maintained:

        >>> df.nlargest(3, 'population', keep='all')
                  population      GDP alpha-2
        France      65000000  2583560      FR
        Italy       59000000  1937894      IT
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN

        To order by the largest values in column "population" and then "GDP",
        we can specify multiple columns like in the next example.

        >>> df.nlargest(3, ['population', 'GDP'])
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Brunei      434000    12128      BN
        """
        return algorithms.SelectNFrame(self,
                                       n=n,
                                       keep=keep,
                                       columns=columns).nlargest()

    def nsmallest(self, n, columns, keep='first'):
        """
        Return the first `n` rows ordered by `columns` in ascending order.

        Return the first `n` rows with the smallest values in `columns`, in
        ascending order. The columns that are not specified are returned as
        well, but not used for ordering.

        This method is equivalent to
        ``df.sort_values(columns, ascending=True).head(n)``, but more
        performant.

        Parameters
        ----------
        n : int
            Number of items to retrieve.
        columns : list or str
            Column name or names to order by.
        keep : {'first', 'last', 'all'}, default 'first'
            Where there are duplicate values:

            - ``first`` : take the first occurrence.
            - ``last`` : take the last occurrence.
            - ``all`` : do not drop any duplicates, even if it means
              selecting more than `n` items.

            .. versionadded:: 0.24.0

        Returns
        -------
        DataFrame

        See Also
        --------
        DataFrame.nlargest : Return the first `n` rows ordered by `columns`
            in descending order.
        DataFrame.sort_values : Sort DataFrame by the values.
        DataFrame.head : Return the first `n` rows without re-ordering.

        Examples
        --------
        >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
        ...                                   434000, 434000, 337000, 11300,
        ...                                   11300, 11300],
        ...                    'GDP': [1937894, 2583560, 12011, 4520, 12128,
        ...                            17036, 182, 38, 311],
        ...                    'alpha-2': ["IT", "FR", "MT", "MV", "BN",
        ...                                "IS", "NR", "TV", "AI"]},
        ...                   index=["Italy", "France", "Malta",
        ...                          "Maldives", "Brunei", "Iceland",
        ...                          "Nauru", "Tuvalu", "Anguilla"])
        >>> df
                  population      GDP alpha-2
        Italy       59000000  1937894      IT
        France      65000000  2583560      FR
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN
        Iceland       337000    17036      IS
        Nauru          11300      182      NR
        Tuvalu         11300       38      TV
        Anguilla       11300      311      AI

        In the following example, we will use ``nsmallest`` to select the
        three rows having the smallest values in column "population".

        >>> df.nsmallest(3, 'population')
                  population  GDP alpha-2
        Nauru          11300  182      NR
        Tuvalu         11300   38      TV
        Anguilla       11300  311      AI

        When using ``keep='last'``, ties are resolved in reverse order:

        >>> df.nsmallest(3, 'population', keep='last')
                  population  GDP alpha-2
        Anguilla       11300  311      AI
        Tuvalu         11300   38      TV
        Nauru          11300  182      NR

        When using ``keep='all'``, all duplicate items are maintained:

        >>> df.nsmallest(3, 'population', keep='all')
                  population  GDP alpha-2
        Nauru          11300  182      NR
        Tuvalu         11300   38      TV
        Anguilla       11300  311      AI

        To order by the smallest values in column "population" and then
        "GDP", we can specify multiple columns like in the next example.

        >>> df.nsmallest(3, ['population', 'GDP'])
                  population  GDP alpha-2
        Tuvalu         11300   38      TV
        Nauru          11300  182      NR
        Anguilla       11300  311      AI
        """
        return algorithms.SelectNFrame(self,
                                       n=n,
                                       keep=keep,
                                       columns=columns).nsmallest()

    def swaplevel(self, i=-2, j=-1, axis=0):
        """
        Swap levels i and j in a MultiIndex on a particular axis.

        Parameters
        ----------
        i, j : int, string (can be mixed)
            Levels of the index to be swapped. Can pass level name as string.
        axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis on which to swap levels.

        Returns
        -------
        DataFrame

        .. versionchanged:: 0.18.1
           The indexes ``i`` and ``j`` are now optional, and default to
           the two innermost levels of the index.
        """
        result = self.copy()

        axis = self._get_axis_number(axis)
        if axis == 0:
            result.index = result.index.swaplevel(i, j)
        else:
            result.columns = result.columns.swaplevel(i, j)
        return result

    def reorder_levels(self, order, axis=0):
        """
        Rearrange index levels using input order. May not drop or
        duplicate levels.

        Parameters
        ----------
        order : list of int or list of str
            List representing new level order. Reference level by number
            (position) or by key (label).
        axis : int
            Where to reorder levels.
Returns ------- type of caller (new object) """ axis = self._get_axis_number(axis) if not isinstance(self._get_axis(axis), MultiIndex): # pragma: no cover raise TypeError('Can only reorder levels on a hierarchical axis.') result = self.copy() if axis == 0: result.index = result.index.reorder_levels(order) else: result.columns = result.columns.reorder_levels(order) return result # ---------------------------------------------------------------------- # Arithmetic / combination related def _combine_frame(self, other, func, fill_value=None, level=None): this, other = self.align(other, join='outer', level=level, copy=False) new_index, new_columns = this.index, this.columns def _arith_op(left, right): # for the mixed_type case where we iterate over columns, # _arith_op(left, right) is equivalent to # left._binop(right, func, fill_value=fill_value) left, right = ops.fill_binop(left, right, fill_value) return func(left, right) if ops.should_series_dispatch(this, other, func): # iterate over columns return ops.dispatch_to_series(this, other, _arith_op) else: result = _arith_op(this.values, other.values) return self._constructor(result, index=new_index, columns=new_columns, copy=False) def _combine_match_index(self, other, func, level=None): left, right = self.align(other, join='outer', axis=0, level=level, copy=False) assert left.index.equals(right.index) if left._is_mixed_type or right._is_mixed_type: # operate column-wise; avoid costly object-casting in `.values` return ops.dispatch_to_series(left, right, func) else: # fastpath --> operate directly on values with np.errstate(all="ignore"): new_data = func(left.values.T, right.values).T return self._constructor(new_data, index=left.index, columns=self.columns, copy=False) def _combine_match_columns(self, other, func, level=None): assert isinstance(other, Series) left, right = self.align(other, join='outer', axis=1, level=level, copy=False) assert left.columns.equals(right.index) return ops.dispatch_to_series(left, right, func, axis="columns") def _combine_const(self, other, func): assert lib.is_scalar(other) or np.ndim(other) == 0 return ops.dispatch_to_series(self, other, func) def combine(self, other, func, fill_value=None, overwrite=True): """ Perform column-wise combine with another DataFrame. Combines a DataFrame with `other` DataFrame using `func` to element-wise combine columns. The row and column indexes of the resulting DataFrame will be the union of the two. Parameters ---------- other : DataFrame The DataFrame to merge column-wise. func : function Function that takes two series as inputs and return a Series or a scalar. Used to merge the two dataframes column by columns. fill_value : scalar value, default None The value to fill NaNs with prior to passing any column to the merge func. overwrite : bool, default True If True, columns in `self` that do not exist in `other` will be overwritten with NaNs. Returns ------- DataFrame Combination of the provided DataFrames. See Also -------- DataFrame.combine_first : Combine two DataFrame objects and default to non-null values in frame calling the method. Examples -------- Combine using a simple function that chooses the smaller column. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> take_smaller = lambda s1, s2: s1 if s1.sum() < s2.sum() else s2 >>> df1.combine(df2, take_smaller) A B 0 0 3 1 0 3 Example using a true element-wise combine function. 
>>> df1 = pd.DataFrame({'A': [5, 0], 'B': [2, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine(df2, np.minimum) A B 0 1 2 1 0 3 Using `fill_value` fills Nones prior to passing the column to the merge function. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine(df2, take_smaller, fill_value=-5) A B 0 0 -5.0 1 0 4.0 However, if the same element in both dataframes is None, that None is preserved >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [None, 3]}) >>> df1.combine(df2, take_smaller, fill_value=-5) A B 0 0 -5.0 1 0 3.0 Example that demonstrates the use of `overwrite` and behavior when the axis differ between the dataframes. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]}) >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [-10, 1], }, index=[1, 2]) >>> df1.combine(df2, take_smaller) A B C 0 NaN NaN NaN 1 NaN 3.0 -10.0 2 NaN 3.0 1.0 >>> df1.combine(df2, take_smaller, overwrite=False) A B C 0 0.0 NaN NaN 1 0.0 3.0 -10.0 2 NaN 3.0 1.0 Demonstrating the preference of the passed in dataframe. >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1], }, index=[1, 2]) >>> df2.combine(df1, take_smaller) A B C 0 0.0 NaN NaN 1 0.0 3.0 NaN 2 NaN 3.0 NaN >>> df2.combine(df1, take_smaller, overwrite=False) A B C 0 0.0 NaN NaN 1 0.0 3.0 1.0 2 NaN 3.0 1.0 """ other_idxlen = len(other.index) # save for compare this, other = self.align(other, copy=False) new_index = this.index if other.empty and len(new_index) == len(self.index): return self.copy() if self.empty and len(other) == other_idxlen: return other.copy() # sorts if possible new_columns = this.columns.union(other.columns) do_fill = fill_value is not None result = {} for col in new_columns: series = this[col] otherSeries = other[col] this_dtype = series.dtype other_dtype = otherSeries.dtype this_mask = isna(series) other_mask = isna(otherSeries) # don't overwrite columns unnecessarily # DO propagate if this column is not in the intersection if not overwrite and other_mask.all(): result[col] = this[col].copy() continue if do_fill: series = series.copy() otherSeries = otherSeries.copy() series[this_mask] = fill_value otherSeries[other_mask] = fill_value if col not in self.columns: # If self DataFrame does not have col in other DataFrame, # try to promote series, which is all NaN, as other_dtype. new_dtype = other_dtype try: series = series.astype(new_dtype, copy=False) except ValueError: # e.g. new_dtype is integer types pass else: # if we have different dtypes, possibly promote new_dtype = find_common_type([this_dtype, other_dtype]) if not is_dtype_equal(this_dtype, new_dtype): series = series.astype(new_dtype) if not is_dtype_equal(other_dtype, new_dtype): otherSeries = otherSeries.astype(new_dtype) arr = func(series, otherSeries) arr = maybe_downcast_to_dtype(arr, this_dtype) result[col] = arr # convert_objects just in case return self._constructor(result, index=new_index, columns=new_columns) def combine_first(self, other): """ Update null elements with value in the same location in `other`. Combine two DataFrame objects by filling null values in one DataFrame with non-null values from other DataFrame. The row and column indexes of the resulting DataFrame will be the union of the two. Parameters ---------- other : DataFrame Provided DataFrame to use to fill null values. Returns ------- DataFrame See Also -------- DataFrame.combine : Perform series-wise operation on two DataFrames using a given function. 
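
        Notes
        -----
        Conceptually this is ``self.where(self.notna(), other)`` evaluated
        column by column after aligning both operands; internally,
        datetime-like values are viewed as integers so the element-wise
        ``where`` can operate on them.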
Examples -------- >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine_first(df2) A B 0 1.0 3.0 1 0.0 4.0 Null values still persist if the location of that null value does not exist in `other` >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [4, None]}) >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1]}, index=[1, 2]) >>> df1.combine_first(df2) A B C 0 NaN 4.0 NaN 1 0.0 3.0 1.0 2 NaN 3.0 1.0 """ import pandas.core.computation.expressions as expressions def extract_values(arr): # Does two things: # 1. maybe gets the values from the Series / Index # 2. convert datelike to i8 if isinstance(arr, (ABCIndexClass, ABCSeries)): arr = arr._values if needs_i8_conversion(arr): if is_extension_array_dtype(arr.dtype): arr = arr.asi8 else: arr = arr.view('i8') return arr def combiner(x, y): mask = isna(x) if isinstance(mask, (ABCIndexClass, ABCSeries)): mask = mask._values x_values = extract_values(x) y_values = extract_values(y) # If the column y in other DataFrame is not in first DataFrame, # just return y_values. if y.name not in self.columns: return y_values return expressions.where(mask, y_values, x_values) return self.combine(other, combiner, overwrite=False) @deprecate_kwarg(old_arg_name='raise_conflict', new_arg_name='errors', mapping={False: 'ignore', True: 'raise'}) def update(self, other, join='left', overwrite=True, filter_func=None, errors='ignore'): """ Modify in place using non-NA values from another DataFrame. Aligns on indices. There is no return value. Parameters ---------- other : DataFrame, or object coercible into a DataFrame Should have at least one matching index/column label with the original DataFrame. If a Series is passed, its name attribute must be set, and that will be used as the column name to align with the original DataFrame. join : {'left'}, default 'left' Only left join is implemented, keeping the index and columns of the original object. overwrite : bool, default True How to handle non-NA values for overlapping keys: * True: overwrite original DataFrame's values with values from `other`. * False: only update values that are NA in the original DataFrame. filter_func : callable(1d-array) -> bool 1d-array, optional Can choose to replace values other than NA. Return True for values that should be updated. errors : {'raise', 'ignore'}, default 'ignore' If 'raise', will raise a ValueError if the DataFrame and `other` both contain non-NA data in the same place. .. versionchanged :: 0.24.0 Changed from `raise_conflict=False|True` to `errors='ignore'|'raise'`. Returns ------- None : method directly changes calling object Raises ------ ValueError * When `errors='raise'` and there's overlapping non-NA data. * When `errors` is not either `'ignore'` or `'raise'` NotImplementedError * If `join != 'left'` See Also -------- dict.update : Similar method for dictionaries. DataFrame.merge : For column(s)-on-columns(s) operations. Examples -------- >>> df = pd.DataFrame({'A': [1, 2, 3], ... 'B': [400, 500, 600]}) >>> new_df = pd.DataFrame({'B': [4, 5, 6], ... 'C': [7, 8, 9]}) >>> df.update(new_df) >>> df A B 0 1 4 1 2 5 2 3 6 The DataFrame's length does not increase as a result of the update, only values at matching index/column labels are updated. >>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_df = pd.DataFrame({'B': ['d', 'e', 'f', 'g', 'h', 'i']}) >>> df.update(new_df) >>> df A B 0 a d 1 b e 2 c f For Series, it's name attribute must be set. 
>>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_column = pd.Series(['d', 'e'], name='B', index=[0, 2]) >>> df.update(new_column) >>> df A B 0 a d 1 b y 2 c e >>> df = pd.DataFrame({'A': ['a', 'b', 'c'], ... 'B': ['x', 'y', 'z']}) >>> new_df = pd.DataFrame({'B': ['d', 'e']}, index=[1, 2]) >>> df.update(new_df) >>> df A B 0 a x 1 b d 2 c e If `other` contains NaNs the corresponding values are not updated in the original dataframe. >>> df = pd.DataFrame({'A': [1, 2, 3], ... 'B': [400, 500, 600]}) >>> new_df = pd.DataFrame({'B': [4, np.nan, 6]}) >>> df.update(new_df) >>> df A B 0 1 4.0 1 2 500.0 2 3 6.0 """ import pandas.core.computation.expressions as expressions # TODO: Support other joins if join != 'left': # pragma: no cover raise NotImplementedError("Only left join is supported") if errors not in ['ignore', 'raise']: raise ValueError("The parameter errors must be either " "'ignore' or 'raise'") if not isinstance(other, DataFrame): other = DataFrame(other) other = other.reindex_like(self) for col in self.columns: this = self[col]._values that = other[col]._values if filter_func is not None: with np.errstate(all='ignore'): mask = ~filter_func(this) | isna(that) else: if errors == 'raise': mask_this = notna(that) mask_that = notna(this) if any(mask_this & mask_that): raise ValueError("Data overlaps.") if overwrite: mask = isna(that) else: mask = notna(this) # don't overwrite columns unnecessarily if mask.all(): continue self[col] = expressions.where(mask, this, that) # ---------------------------------------------------------------------- # Data reshaping _shared_docs['pivot'] = """ Return reshaped DataFrame organized by given index / column values. Reshape data (produce a "pivot" table) based on column values. Uses unique values from specified `index` / `columns` to form axes of the resulting DataFrame. This function does not support data aggregation, multiple values will result in a MultiIndex in the columns. See the :ref:`User Guide <reshaping>` for more on reshaping. Parameters ----------%s index : string or object, optional Column to use to make new frame's index. If None, uses existing index. columns : string or object Column to use to make new frame's columns. values : string, object or a list of the previous, optional Column(s) to use for populating new frame's values. If not specified, all remaining columns will be used and the result will have hierarchically indexed columns. .. versionchanged :: 0.23.0 Also accept list of column names. Returns ------- DataFrame Returns reshaped DataFrame. Raises ------ ValueError: When there are any `index`, `columns` combinations with multiple values. `DataFrame.pivot_table` when you need to aggregate. See Also -------- DataFrame.pivot_table : Generalization of pivot that can handle duplicate values for one index/column pair. DataFrame.unstack : Pivot based on the index values instead of a column. Notes ----- For finer-tuned control, see hierarchical indexing documentation along with the related stack/unstack methods. Examples -------- >>> df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two', ... 'two'], ... 'bar': ['A', 'B', 'C', 'A', 'B', 'C'], ... 'baz': [1, 2, 3, 4, 5, 6], ... 
'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) >>> df foo bar baz zoo 0 one A 1 x 1 one B 2 y 2 one C 3 z 3 two A 4 q 4 two B 5 w 5 two C 6 t >>> df.pivot(index='foo', columns='bar', values='baz') bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar')['baz'] bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar', values=['baz', 'zoo']) baz zoo bar A B C A B C foo one 1 2 3 x y z two 4 5 6 q w t A ValueError is raised if there are any duplicates. >>> df = pd.DataFrame({"foo": ['one', 'one', 'two', 'two'], ... "bar": ['A', 'A', 'B', 'C'], ... "baz": [1, 2, 3, 4]}) >>> df foo bar baz 0 one A 1 1 one A 2 2 two B 3 3 two C 4 Notice that the first two rows are the same for our `index` and `columns` arguments. >>> df.pivot(index='foo', columns='bar', values='baz') Traceback (most recent call last): ... ValueError: Index contains duplicate entries, cannot reshape """ @Substitution('') @Appender(_shared_docs['pivot']) def pivot(self, index=None, columns=None, values=None): from pandas.core.reshape.pivot import pivot return pivot(self, index=index, columns=columns, values=values) _shared_docs['pivot_table'] = """ Create a spreadsheet-style pivot table as a DataFrame. The levels in the pivot table will be stored in MultiIndex objects (hierarchical indexes) on the index and columns of the result DataFrame. Parameters ----------%s values : column to aggregate, optional index : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table index. If an array is passed, it is being used as the same manner as column values. columns : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table column. If an array is passed, it is being used as the same manner as column values. aggfunc : function, list of functions, dict, default numpy.mean If list of functions passed, the resulting pivot table will have hierarchical columns whose top level are the function names (inferred from the function objects themselves) If dict is passed, the key is column to aggregate and value is function or list of functions fill_value : scalar, default None Value to replace missing values with margins : boolean, default False Add all row / columns (e.g. for subtotal / grand totals) dropna : boolean, default True Do not include columns whose entries are all NaN margins_name : string, default 'All' Name of the row / column that will contain the totals when margins is True. observed : boolean, default False This only applies if any of the groupers are Categoricals. If True: only show observed values for categorical groupers. If False: show all values for categorical groupers. .. versionchanged :: 0.25.0 Returns ------- DataFrame See Also -------- DataFrame.pivot : Pivot without aggregation that can handle non-numeric data. Examples -------- >>> df = pd.DataFrame({"A": ["foo", "foo", "foo", "foo", "foo", ... "bar", "bar", "bar", "bar"], ... "B": ["one", "one", "one", "two", "two", ... "one", "one", "two", "two"], ... "C": ["small", "large", "large", "small", ... "small", "large", "small", "small", ... "large"], ... "D": [1, 2, 2, 3, 3, 4, 5, 6, 7], ... 
"E": [2, 4, 5, 5, 6, 6, 8, 9, 9]}) >>> df A B C D E 0 foo one small 1 2 1 foo one large 2 4 2 foo one large 2 5 3 foo two small 3 5 4 foo two small 3 6 5 bar one large 4 6 6 bar one small 5 8 7 bar two small 6 9 8 bar two large 7 9 This first example aggregates values by taking the sum. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum) >>> table C large small A B bar one 4.0 5.0 two 7.0 6.0 foo one 4.0 1.0 two NaN 6.0 We can also fill missing values using the `fill_value` parameter. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum, fill_value=0) >>> table C large small A B bar one 4 5 two 7 6 foo one 4 1 two 0 6 The next example aggregates by taking the mean across multiple columns. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': np.mean}) >>> table D E A C bar large 5.500000 7.500000 small 5.500000 8.500000 foo large 2.000000 4.500000 small 2.333333 4.333333 We can also calculate multiple types of aggregations for any given value column. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': [min, max, np.mean]}) >>> table D E mean max mean min A C bar large 5.500000 9.0 7.500000 6.0 small 5.500000 9.0 8.500000 8.0 foo large 2.000000 5.0 4.500000 4.0 small 2.333333 6.0 4.333333 2.0 """ @Substitution('') @Appender(_shared_docs['pivot_table']) def pivot_table(self, values=None, index=None, columns=None, aggfunc='mean', fill_value=None, margins=False, dropna=True, margins_name='All', observed=False): from pandas.core.reshape.pivot import pivot_table return pivot_table(self, values=values, index=index, columns=columns, aggfunc=aggfunc, fill_value=fill_value, margins=margins, dropna=dropna, margins_name=margins_name, observed=observed) def stack(self, level=-1, dropna=True): """ Stack the prescribed level(s) from columns to index. Return a reshaped DataFrame or Series having a multi-level index with one or more new inner-most levels compared to the current DataFrame. The new inner-most levels are created by pivoting the columns of the current dataframe: - if the columns have a single level, the output is a Series; - if the columns have multiple levels, the new index level(s) is (are) taken from the prescribed level(s) and the output is a DataFrame. The new index levels are sorted. Parameters ---------- level : int, str, list, default -1 Level(s) to stack from the column axis onto the index axis, defined as one index or label, or a list of indices or labels. dropna : bool, default True Whether to drop rows in the resulting Frame/Series with missing values. Stacking a column level onto the index axis can create combinations of index and column values that are missing from the original dataframe. See Examples section. Returns ------- DataFrame or Series Stacked dataframe or series. See Also -------- DataFrame.unstack : Unstack prescribed level(s) from index axis onto column axis. DataFrame.pivot : Reshape dataframe from long format to wide format. DataFrame.pivot_table : Create a spreadsheet-style pivot table as a DataFrame. Notes ----- The function is named by analogy with a collection of books being reorganized from being side by side on a horizontal position (the columns of the dataframe) to being stacked vertically on top of each other (in the index of the dataframe). Examples -------- **Single level columns** >>> df_single_level_cols = pd.DataFrame([[0, 1], [2, 3]], ... index=['cat', 'dog'], ... 
columns=['weight', 'height']) Stacking a dataframe with a single level column axis returns a Series: >>> df_single_level_cols weight height cat 0 1 dog 2 3 >>> df_single_level_cols.stack() cat weight 0 height 1 dog weight 2 height 3 dtype: int64 **Multi level columns: simple case** >>> multicol1 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('weight', 'pounds')]) >>> df_multi_level_cols1 = pd.DataFrame([[1, 2], [2, 4]], ... index=['cat', 'dog'], ... columns=multicol1) Stacking a dataframe with a multi-level column axis: >>> df_multi_level_cols1 weight kg pounds cat 1 2 dog 2 4 >>> df_multi_level_cols1.stack() weight cat kg 1 pounds 2 dog kg 2 pounds 4 **Missing values** >>> multicol2 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('height', 'm')]) >>> df_multi_level_cols2 = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]], ... index=['cat', 'dog'], ... columns=multicol2) It is common to have missing values when stacking a dataframe with multi-level columns, as the stacked dataframe typically has more values than the original dataframe. Missing values are filled with NaNs: >>> df_multi_level_cols2 weight height kg m cat 1.0 2.0 dog 3.0 4.0 >>> df_multi_level_cols2.stack() height weight cat kg NaN 1.0 m 2.0 NaN dog kg NaN 3.0 m 4.0 NaN **Prescribing the level(s) to be stacked** The first parameter controls which level or levels are stacked: >>> df_multi_level_cols2.stack(0) kg m cat height NaN 2.0 weight 1.0 NaN dog height NaN 4.0 weight 3.0 NaN >>> df_multi_level_cols2.stack([0, 1]) cat height m 2.0 weight kg 1.0 dog height m 4.0 weight kg 3.0 dtype: float64 **Dropping missing values** >>> df_multi_level_cols3 = pd.DataFrame([[None, 1.0], [2.0, 3.0]], ... index=['cat', 'dog'], ... columns=multicol2) Note that rows where all values are missing are dropped by default but this behaviour can be controlled via the dropna keyword parameter: >>> df_multi_level_cols3 weight height kg m cat NaN 1.0 dog 2.0 3.0 >>> df_multi_level_cols3.stack(dropna=False) height weight cat kg NaN NaN m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN >>> df_multi_level_cols3.stack(dropna=True) height weight cat m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN """ from pandas.core.reshape.reshape import stack, stack_multiple if isinstance(level, (tuple, list)): return stack_multiple(self, level, dropna=dropna) else: return stack(self, level, dropna=dropna) def unstack(self, level=-1, fill_value=None): """ Pivot a level of the (necessarily hierarchical) index labels, returning a DataFrame having a new level of column labels whose inner-most level consists of the pivoted index labels. If the index is not a MultiIndex, the output will be a Series (the analogue of stack when the columns are not a MultiIndex). The level involved will automatically get sorted. Parameters ---------- level : int, string, or list of these, default -1 (last level) Level(s) of index to unstack, can pass level name fill_value : replace NaN with this value if the unstack produces missing values .. versionadded:: 0.18.0 Returns ------- Series or DataFrame See Also -------- DataFrame.pivot : Pivot a table based on column values. DataFrame.stack : Pivot a level of the column labels (inverse operation from `unstack`). Examples -------- >>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), ... 
('two', 'a'), ('two', 'b')]) >>> s = pd.Series(np.arange(1.0, 5.0), index=index) >>> s one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 >>> s.unstack(level=-1) a b one 1.0 2.0 two 3.0 4.0 >>> s.unstack(level=0) one two a 1.0 3.0 b 2.0 4.0 >>> df = s.unstack(level=0) >>> df.unstack() one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 """ from pandas.core.reshape.reshape import unstack return unstack(self, level, fill_value) _shared_docs['melt'] = (""" Unpivot a DataFrame from wide format to long format, optionally leaving identifier variables set. This function is useful to massage a DataFrame into a format where one or more columns are identifier variables (`id_vars`), while all other columns, considered measured variables (`value_vars`), are "unpivoted" to the row axis, leaving just two non-identifier columns, 'variable' and 'value'. %(versionadded)s Parameters ---------- frame : DataFrame id_vars : tuple, list, or ndarray, optional Column(s) to use as identifier variables. value_vars : tuple, list, or ndarray, optional Column(s) to unpivot. If not specified, uses all columns that are not set as `id_vars`. var_name : scalar Name to use for the 'variable' column. If None it uses ``frame.columns.name`` or 'variable'. value_name : scalar, default 'value' Name to use for the 'value' column. col_level : int or string, optional If columns are a MultiIndex then use this level to melt. Returns ------- DataFrame Unpivoted DataFrame. See Also -------- %(other)s pivot_table DataFrame.pivot Examples -------- >>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'}, ... 'B': {0: 1, 1: 3, 2: 5}, ... 'C': {0: 2, 1: 4, 2: 6}}) >>> df A B C 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)sid_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=['A'], value_vars=['B', 'C']) A variable value 0 a B 1 1 b B 3 2 c B 5 3 a C 2 4 b C 4 5 c C 6 The names of 'variable' and 'value' columns can be customized: >>> %(caller)sid_vars=['A'], value_vars=['B'], ... var_name='myVarname', value_name='myValname') A myVarname myValname 0 a B 1 1 b B 3 2 c B 5 If you have multi-index columns: >>> df.columns = [list('ABC'), list('DEF')] >>> df A B C D E F 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)scol_level=0, id_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=[('A', 'D')], value_vars=[('B', 'E')]) (A, D) variable_0 variable_1 value 0 a B E 1 1 b B E 3 2 c B E 5 """) @Appender(_shared_docs['melt'] % dict(caller='df.melt(', versionadded='.. versionadded:: 0.20.0\n', other='melt')) def melt(self, id_vars=None, value_vars=None, var_name=None, value_name='value', col_level=None): from pandas.core.reshape.melt import melt return melt(self, id_vars=id_vars, value_vars=value_vars, var_name=var_name, value_name=value_name, col_level=col_level) # ---------------------------------------------------------------------- # Time series-related def diff(self, periods=1, axis=0): """ First discrete difference of element. Calculates the difference of a DataFrame element compared with another element in the DataFrame (default is the element in the same column of the previous row). Parameters ---------- periods : int, default 1 Periods to shift for calculating difference, accepts negative values. axis : {0 or 'index', 1 or 'columns'}, default 0 Take difference over rows (0) or columns (1). .. versionadded:: 0.16.1. Returns ------- DataFrame See Also -------- Series.diff: First discrete difference for a Series. DataFrame.pct_change: Percent change over given number of periods. 
DataFrame.shift: Shift index by desired number of periods with an optional time freq. Examples -------- Difference with previous row >>> df = pd.DataFrame({'a': [1, 2, 3, 4, 5, 6], ... 'b': [1, 1, 2, 3, 5, 8], ... 'c': [1, 4, 9, 16, 25, 36]}) >>> df a b c 0 1 1 1 1 2 1 4 2 3 2 9 3 4 3 16 4 5 5 25 5 6 8 36 >>> df.diff() a b c 0 NaN NaN NaN 1 1.0 0.0 3.0 2 1.0 1.0 5.0 3 1.0 1.0 7.0 4 1.0 2.0 9.0 5 1.0 3.0 11.0 Difference with previous column >>> df.diff(axis=1) a b c 0 NaN 0.0 0.0 1 NaN -1.0 3.0 2 NaN -1.0 7.0 3 NaN -1.0 13.0 4 NaN 0.0 20.0 5 NaN 2.0 28.0 Difference with 3rd previous row >>> df.diff(periods=3) a b c 0 NaN NaN NaN 1 NaN NaN NaN 2 NaN NaN NaN 3 3.0 2.0 15.0 4 3.0 4.0 21.0 5 3.0 6.0 27.0 Difference with following row >>> df.diff(periods=-1) a b c 0 -1.0 0.0 -3.0 1 -1.0 -1.0 -5.0 2 -1.0 -1.0 -7.0 3 -1.0 -2.0 -9.0 4 -1.0 -3.0 -11.0 5 NaN NaN NaN """ bm_axis = self._get_block_manager_axis(axis) new_data = self._data.diff(n=periods, axis=bm_axis) return self._constructor(new_data) # ---------------------------------------------------------------------- # Function application def _gotitem(self, key: Union[str, List[str]], ndim: int, subset: Optional[Union[Series, ABCDataFrame]] = None, ) -> Union[Series, ABCDataFrame]: """ Sub-classes to define. Return a sliced object. Parameters ---------- key : string / list of selections ndim : 1,2 requested ndim of result subset : object, default None subset to act on """ if subset is None: subset = self elif subset.ndim == 1: # is Series return subset # TODO: _shallow_copy(subset)? return subset[key] _agg_summary_and_see_also_doc = dedent(""" The aggregation operations are always performed over an axis, either the index (default) or the column axis. This behavior is different from `numpy` aggregation functions (`mean`, `median`, `prod`, `sum`, `std`, `var`), where the default is to compute the aggregation of the flattened array, e.g., ``numpy.mean(arr_2d)`` as opposed to ``numpy.mean(arr_2d, axis=0)``. `agg` is an alias for `aggregate`. Use the alias. See Also -------- DataFrame.apply : Perform any type of operations. DataFrame.transform : Perform transformation type operations. core.groupby.GroupBy : Perform operations over groups. core.resample.Resampler : Perform operations over resampled bins. core.window.Rolling : Perform operations over rolling window. core.window.Expanding : Perform operations over expanding window. core.window.EWM : Perform operation over exponential weighted window. """) _agg_examples_doc = dedent(""" Examples -------- >>> df = pd.DataFrame([[1, 2, 3], ... [4, 5, 6], ... [7, 8, 9], ... [np.nan, np.nan, np.nan]], ... columns=['A', 'B', 'C']) Aggregate these functions over the rows. >>> df.agg(['sum', 'min']) A B C sum 12.0 15.0 18.0 min 1.0 2.0 3.0 Different aggregations per column. >>> df.agg({'A' : ['sum', 'min'], 'B' : ['min', 'max']}) A B max NaN 8.0 min 1.0 2.0 sum 12.0 NaN Aggregate over the columns. >>> df.agg("mean", axis="columns") 0 2.0 1 5.0 2 8.0 3 NaN dtype: float64 """) @Substitution(see_also=_agg_summary_and_see_also_doc, examples=_agg_examples_doc, versionadded='\n.. 
versionadded:: 0.20.0\n', **_shared_doc_kwargs) @Appender(_shared_docs['aggregate']) def aggregate(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) result = None try: result, how = self._aggregate(func, axis=axis, *args, **kwargs) except TypeError: pass if result is None: return self.apply(func, axis=axis, args=args, **kwargs) return result def _aggregate(self, arg, axis=0, *args, **kwargs): if axis == 1: # NDFrame.aggregate returns a tuple, and we need to transpose # only result result, how = self.T._aggregate(arg, *args, **kwargs) result = result.T if result is not None else result return result, how return super()._aggregate(arg, *args, **kwargs) agg = aggregate @Appender(_shared_docs['transform'] % _shared_doc_kwargs) def transform(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) if axis == 1: return self.T.transform(func, *args, **kwargs).T return super().transform(func, *args, **kwargs) def apply(self, func, axis=0, broadcast=None, raw=False, reduce=None, result_type=None, args=(), **kwds): """ Apply a function along an axis of the DataFrame. Objects passed to the function are Series objects whose index is either the DataFrame's index (``axis=0``) or the DataFrame's columns (``axis=1``). By default (``result_type=None``), the final return type is inferred from the return type of the applied function. Otherwise, it depends on the `result_type` argument. Parameters ---------- func : function Function to apply to each column or row. axis : {0 or 'index', 1 or 'columns'}, default 0 Axis along which the function is applied: * 0 or 'index': apply function to each column. * 1 or 'columns': apply function to each row. broadcast : bool, optional Only relevant for aggregation functions: * ``False`` or ``None`` : returns a Series whose length is the length of the index or the number of columns (based on the `axis` parameter) * ``True`` : results will be broadcast to the original shape of the frame, the original index and columns will be retained. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by result_type='broadcast'. raw : bool, default False * ``False`` : passes each row or column as a Series to the function. * ``True`` : the passed function will receive ndarray objects instead. If you are just applying a NumPy reduction function this will achieve much better performance. reduce : bool or None, default None Try to apply reduction procedures. If the DataFrame is empty, `apply` will use `reduce` to determine whether the result should be a Series or a DataFrame. If ``reduce=None`` (the default), `apply`'s return value will be guessed by calling `func` on an empty Series (note: while guessing, exceptions raised by `func` will be ignored). If ``reduce=True`` a Series will always be returned, and if ``reduce=False`` a DataFrame will always be returned. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by ``result_type='reduce'``. result_type : {'expand', 'reduce', 'broadcast', None}, default None These only act when ``axis=1`` (columns): * 'expand' : list-like results will be turned into columns. * 'reduce' : returns a Series if possible rather than expanding list-like results. This is the opposite of 'expand'. * 'broadcast' : results will be broadcast to the original shape of the DataFrame, the original index and columns will be retained. The default behaviour (None) depends on the return value of the applied function: list-like results will be returned as a Series of those. 
However if the apply function returns a Series these are expanded to columns. .. versionadded:: 0.23.0 args : tuple Positional arguments to pass to `func` in addition to the array/series. **kwds Additional keyword arguments to pass as keywords arguments to `func`. Returns ------- Series or DataFrame Result of applying ``func`` along the given axis of the DataFrame. See Also -------- DataFrame.applymap: For elementwise operations. DataFrame.aggregate: Only perform aggregating type operations. DataFrame.transform: Only perform transforming type operations. Notes ----- In the current implementation apply calls `func` twice on the first column/row to decide whether it can take a fast or slow code path. This can lead to unexpected behavior if `func` has side-effects, as they will take effect twice for the first column/row. Examples -------- >>> df = pd.DataFrame([[4, 9]] * 3, columns=['A', 'B']) >>> df A B 0 4 9 1 4 9 2 4 9 Using a numpy universal function (in this case the same as ``np.sqrt(df)``): >>> df.apply(np.sqrt) A B 0 2.0 3.0 1 2.0 3.0 2 2.0 3.0 Using a reducing function on either axis >>> df.apply(np.sum, axis=0) A 12 B 27 dtype: int64 >>> df.apply(np.sum, axis=1) 0 13 1 13 2 13 dtype: int64 Returning a list-like will result in a Series >>> df.apply(lambda x: [1, 2], axis=1) 0 [1, 2] 1 [1, 2] 2 [1, 2] dtype: object Passing result_type='expand' will expand list-like results to columns of a Dataframe >>> df.apply(lambda x: [1, 2], axis=1, result_type='expand') 0 1 0 1 2 1 1 2 2 1 2 Returning a Series inside the function is similar to passing ``result_type='expand'``. The resulting column names will be the Series index. >>> df.apply(lambda x: pd.Series([1, 2], index=['foo', 'bar']), axis=1) foo bar 0 1 2 1 1 2 2 1 2 Passing ``result_type='broadcast'`` will ensure the same shape result, whether list-like or scalar is returned by the function, and broadcast it along the axis. The resulting column names will be the originals. >>> df.apply(lambda x: [1, 2], axis=1, result_type='broadcast') A B 0 1 2 1 1 2 2 1 2 """ from pandas.core.apply import frame_apply op = frame_apply(self, func=func, axis=axis, broadcast=broadcast, raw=raw, reduce=reduce, result_type=result_type, args=args, kwds=kwds) return op.get_result() def applymap(self, func): """ Apply a function to a Dataframe elementwise. This method applies a function that accepts and returns a scalar to every element of a DataFrame. Parameters ---------- func : callable Python function, returns a single value from a single value. Returns ------- DataFrame Transformed DataFrame. See Also -------- DataFrame.apply : Apply a function along input axis of DataFrame. Notes ----- In the current implementation applymap calls `func` twice on the first column/row to decide whether it can take a fast or slow code path. This can lead to unexpected behavior if `func` has side-effects, as they will take effect twice for the first column/row. Examples -------- >>> df = pd.DataFrame([[1, 2.12], [3.356, 4.567]]) >>> df 0 1 0 1.000 2.120 1 3.356 4.567 >>> df.applymap(lambda x: len(str(x))) 0 1 0 3 4 1 5 5 Note that a vectorized version of `func` often exists, which will be much faster. You could square each number elementwise. >>> df.applymap(lambda x: x**2) 0 1 0 1.000000 4.494400 1 11.262736 20.857489 But it's better to avoid applymap in that case. 
>>> df ** 2 0 1 0 1.000000 4.494400 1 11.262736 20.857489 """ # if we have a dtype == 'M8[ns]', provide boxed values def infer(x): if x.empty: return lib.map_infer(x, func) return lib.map_infer(x.astype(object).values, func) return self.apply(infer) # ---------------------------------------------------------------------- # Merging / joining methods def append(self, other, ignore_index=False, verify_integrity=False, sort=None): """ Append rows of `other` to the end of caller, returning a new object. Columns in `other` that are not in the caller are added as new columns. Parameters ---------- other : DataFrame or Series/dict-like object, or list of these The data to append. ignore_index : boolean, default False If True, do not use the index labels. verify_integrity : boolean, default False If True, raise ValueError on creating index with duplicates. sort : boolean, default None Sort columns if the columns of `self` and `other` are not aligned. The default sorting is deprecated and will change to not-sorting in a future version of pandas. Explicitly pass ``sort=True`` to silence the warning and sort. Explicitly pass ``sort=False`` to silence the warning and not sort. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- concat : General function to concatenate DataFrame or Series objects. Notes ----- If a list of dict/series is passed and the keys are all contained in the DataFrame's index, the order of the columns in the resulting DataFrame will be unchanged. Iteratively appending rows to a DataFrame can be more computationally intensive than a single concatenate. A better solution is to append those rows to a list and then concatenate the list with the original DataFrame all at once. Examples -------- >>> df = pd.DataFrame([[1, 2], [3, 4]], columns=list('AB')) >>> df A B 0 1 2 1 3 4 >>> df2 = pd.DataFrame([[5, 6], [7, 8]], columns=list('AB')) >>> df.append(df2) A B 0 1 2 1 3 4 0 5 6 1 7 8 With `ignore_index` set to True: >>> df.append(df2, ignore_index=True) A B 0 1 2 1 3 4 2 5 6 3 7 8 The following, while not recommended methods for generating DataFrames, show two ways to generate a DataFrame from multiple data sources. Less efficient: >>> df = pd.DataFrame(columns=['A']) >>> for i in range(5): ... df = df.append({'A': i}, ignore_index=True) >>> df A 0 0 1 1 2 2 3 3 4 4 More efficient: >>> pd.concat([pd.DataFrame([i], columns=['A']) for i in range(5)], ... 
ignore_index=True)
           A
        0  0
        1  1
        2  2
        3  3
        4  4
        """
        if isinstance(other, (Series, dict)):
            if isinstance(other, dict):
                other = Series(other)
            if other.name is None and not ignore_index:
                raise TypeError('Can only append a Series if ignore_index=True'
                                ' or if the Series has a name')

            if other.name is None:
                index = None
            else:
                # other must have the same index name as self, otherwise
                # index name will be reset
                index = Index([other.name], name=self.index.name)

            idx_diff = other.index.difference(self.columns)
            try:
                combined_columns = self.columns.append(idx_diff)
            except TypeError:
                combined_columns = self.columns.astype(object).append(idx_diff)
            other = other.reindex(combined_columns, copy=False)
            other = DataFrame(other.values.reshape((1, len(other))),
                              index=index,
                              columns=combined_columns)
            other = other._convert(datetime=True, timedelta=True)
            if not self.columns.equals(combined_columns):
                self = self.reindex(columns=combined_columns)
        elif isinstance(other, list) and not isinstance(other[0], DataFrame):
            other = DataFrame(other)
            if (self.columns.get_indexer(other.columns) >= 0).all():
                other = other.reindex(columns=self.columns)

        from pandas.core.reshape.concat import concat
        if isinstance(other, (list, tuple)):
            to_concat = [self] + other
        else:
            to_concat = [self, other]
        return concat(to_concat, ignore_index=ignore_index,
                      verify_integrity=verify_integrity,
                      sort=sort)

    def join(self, other, on=None, how='left', lsuffix='', rsuffix='',
             sort=False):
        """
        Join columns of another DataFrame.

        Join columns with `other` DataFrame either on index or on a key
        column. Efficiently join multiple DataFrame objects by index at once
        by passing a list.

        Parameters
        ----------
        other : DataFrame, Series, or list of DataFrame
            Index should be similar to one of the columns in this one. If a
            Series is passed, its name attribute must be set, and that will
            be used as the column name in the resulting joined DataFrame.
        on : str, list of str, or array-like, optional
            Column or index level name(s) in the caller to join on the index
            in `other`, otherwise joins index-on-index. If multiple values
            given, the `other` DataFrame must have a MultiIndex. Can pass an
            array as the join key if it is not already contained in the
            calling DataFrame. Like an Excel VLOOKUP operation.
        how : {'left', 'right', 'outer', 'inner'}, default 'left'
            How to handle the operation of the two objects.

            * left: use calling frame's index (or column if on is specified)
            * right: use `other`'s index.
            * outer: form union of calling frame's index (or column if on is
              specified) with `other`'s index, and sort it lexicographically.
            * inner: form intersection of calling frame's index (or column if
              on is specified) with `other`'s index, preserving the order of
              the calling's one.
        lsuffix : str, default ''
            Suffix to use from left frame's overlapping columns.
        rsuffix : str, default ''
            Suffix to use from right frame's overlapping columns.
        sort : bool, default False
            Order result DataFrame lexicographically by the join key. If
            False, the order of the join key depends on the join type
            (how keyword).

        Returns
        -------
        DataFrame
            A dataframe containing columns from both the caller and `other`.

        See Also
        --------
        DataFrame.merge : For column(s)-on-columns(s) operations.

        Notes
        -----
        Parameters `on`, `lsuffix`, and `rsuffix` are not supported when
        passing a list of `DataFrame` objects.

        Support for specifying index levels as the `on` parameter was added
        in version 0.23.0.

        Examples
        --------
        >>> df = pd.DataFrame({'key': ['K0', 'K1', 'K2', 'K3', 'K4', 'K5'],
        ...
'A': ['A0', 'A1', 'A2', 'A3', 'A4', 'A5']}) >>> df key A 0 K0 A0 1 K1 A1 2 K2 A2 3 K3 A3 4 K4 A4 5 K5 A5 >>> other = pd.DataFrame({'key': ['K0', 'K1', 'K2'], ... 'B': ['B0', 'B1', 'B2']}) >>> other key B 0 K0 B0 1 K1 B1 2 K2 B2 Join DataFrames using their indexes. >>> df.join(other, lsuffix='_caller', rsuffix='_other') key_caller A key_other B 0 K0 A0 K0 B0 1 K1 A1 K1 B1 2 K2 A2 K2 B2 3 K3 A3 NaN NaN 4 K4 A4 NaN NaN 5 K5 A5 NaN NaN If we want to join using the key columns, we need to set key to be the index in both `df` and `other`. The joined DataFrame will have key as its index. >>> df.set_index('key').join(other.set_index('key')) A B key K0 A0 B0 K1 A1 B1 K2 A2 B2 K3 A3 NaN K4 A4 NaN K5 A5 NaN Another option to join using the key columns is to use the `on` parameter. DataFrame.join always uses `other`'s index but we can use any column in `df`. This method preserves the original DataFrame's index in the result. >>> df.join(other.set_index('key'), on='key') key A B 0 K0 A0 B0 1 K1 A1 B1 2 K2 A2 B2 3 K3 A3 NaN 4 K4 A4 NaN 5 K5 A5 NaN """ # For SparseDataFrame's benefit return self._join_compat(other, on=on, how=how, lsuffix=lsuffix, rsuffix=rsuffix, sort=sort) def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='', sort=False): from pandas.core.reshape.merge import merge from pandas.core.reshape.concat import concat if isinstance(other, Series): if other.name is None: raise ValueError('Other Series must have a name') other = DataFrame({other.name: other}) if isinstance(other, DataFrame): return merge(self, other, left_on=on, how=how, left_index=on is None, right_index=True, suffixes=(lsuffix, rsuffix), sort=sort) else: if on is not None: raise ValueError('Joining multiple DataFrames only supported' ' for joining on index') frames = [self] + list(other) can_concat = all(df.index.is_unique for df in frames) # join indexes only using concat if can_concat: if how == 'left': how = 'outer' join_axes = [self.index] else: join_axes = None return concat(frames, axis=1, join=how, join_axes=join_axes, verify_integrity=True) joined = frames[0] for frame in frames[1:]: joined = merge(joined, frame, how=how, left_index=True, right_index=True) return joined @Substitution('') @Appender(_merge_doc, indents=2) def merge(self, right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True, indicator=False, validate=None): from pandas.core.reshape.merge import merge return merge(self, right, how=how, on=on, left_on=left_on, right_on=right_on, left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes, copy=copy, indicator=indicator, validate=validate) def round(self, decimals=0, *args, **kwargs): """ Round a DataFrame to a variable number of decimal places. Parameters ---------- decimals : int, dict, Series Number of decimal places to round each column to. If an int is given, round each column to the same number of places. Otherwise dict and Series round to variable numbers of places. Column names should be in the keys if `decimals` is a dict-like, or in the index if `decimals` is a Series. Any columns not included in `decimals` will be left as is. Elements of `decimals` which are not columns of the input will be ignored. *args Additional keywords have no effect but might be accepted for compatibility with numpy. **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. 
Returns ------- DataFrame A DataFrame with the affected columns rounded to the specified number of decimal places. See Also -------- numpy.around : Round a numpy array to the given number of decimals. Series.round : Round a Series to the given number of decimals. Examples -------- >>> df = pd.DataFrame([(.21, .32), (.01, .67), (.66, .03), (.21, .18)], ... columns=['dogs', 'cats']) >>> df dogs cats 0 0.21 0.32 1 0.01 0.67 2 0.66 0.03 3 0.21 0.18 By providing an integer each column is rounded to the same number of decimal places >>> df.round(1) dogs cats 0 0.2 0.3 1 0.0 0.7 2 0.7 0.0 3 0.2 0.2 With a dict, the number of places for specific columns can be specified with the column names as key and the number of decimal places as value >>> df.round({'dogs': 1, 'cats': 0}) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 Using a Series, the number of places for specific columns can be specified with the column names as index and the number of decimal places as value >>> decimals = pd.Series([0, 1], index=['cats', 'dogs']) >>> df.round(decimals) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 """ from pandas.core.reshape.concat import concat def _dict_round(df, decimals): for col, vals in df.iteritems(): try: yield _series_round(vals, decimals[col]) except KeyError: yield vals def _series_round(s, decimals): if is_integer_dtype(s) or is_float_dtype(s): return s.round(decimals) return s nv.validate_round(args, kwargs) if isinstance(decimals, (dict, Series)): if isinstance(decimals, Series): if not decimals.index.is_unique: raise ValueError("Index of decimals must be unique") new_cols = [col for col in _dict_round(self, decimals)] elif is_integer(decimals): # Dispatch to Series.round new_cols = [_series_round(v, decimals) for _, v in self.iteritems()] else: raise TypeError("decimals must be an integer, a dict-like or a " "Series") if len(new_cols) > 0: return self._constructor(concat(new_cols, axis=1), index=self.index, columns=self.columns) else: return self # ---------------------------------------------------------------------- # Statistical methods, etc. def corr(self, method='pearson', min_periods=1): """ Compute pairwise correlation of columns, excluding NA/null values. Parameters ---------- method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float. Note that the returned matrix from corr will have 1 along the diagonals and will be symmetric regardless of the callable's behavior .. versionadded:: 0.24.0 min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Currently only available for Pearson and Spearman correlation. Returns ------- DataFrame Correlation matrix. See Also -------- DataFrame.corrwith Series.corr Examples -------- >>> def histogram_intersection(a, b): ... v = np.minimum(a, b).sum().round(decimals=1) ... return v >>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... 
columns=['dogs', 'cats']) >>> df.corr(method=histogram_intersection) dogs cats dogs 1.0 0.3 cats 0.3 1.0 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if method == 'pearson': correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods) elif method == 'spearman': correl = libalgos.nancorr_spearman(ensure_float64(mat), minp=min_periods) elif method == 'kendall' or callable(method): if min_periods is None: min_periods = 1 mat = ensure_float64(mat).T corrf = nanops.get_corr_func(method) K = len(cols) correl = np.empty((K, K), dtype=float) mask = np.isfinite(mat) for i, ac in enumerate(mat): for j, bc in enumerate(mat): if i > j: continue valid = mask[i] & mask[j] if valid.sum() < min_periods: c = np.nan elif i == j: c = 1. elif not valid.all(): c = corrf(ac[valid], bc[valid]) else: c = corrf(ac, bc) correl[i, j] = c correl[j, i] = c else: raise ValueError("method must be either 'pearson', " "'spearman', 'kendall', or a callable, " "'{method}' was supplied".format(method=method)) return self._constructor(correl, index=idx, columns=cols) def cov(self, min_periods=None): """ Compute pairwise covariance of columns, excluding NA/null values. Compute the pairwise covariance among the series of a DataFrame. The returned data frame is the `covariance matrix <https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns of the DataFrame. Both NA and null values are automatically excluded from the calculation. (See the note below about bias from missing values.) A threshold can be set for the minimum number of observations for each value created. Comparisons with observations below this threshold will be returned as ``NaN``. This method is generally used for the analysis of time series data to understand the relationship between different measures across time. Parameters ---------- min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Returns ------- DataFrame The covariance matrix of the series of the DataFrame. See Also -------- Series.cov : Compute covariance with another Series. core.window.EWM.cov: Exponential weighted sample covariance. core.window.Expanding.cov : Expanding sample covariance. core.window.Rolling.cov : Rolling sample covariance. Notes ----- Returns the covariance matrix of the DataFrame's time series. The covariance is normalized by N-1. For DataFrames that have Series that are missing data (assuming that data is `missing at random <https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__) the returned covariance matrix will be an unbiased estimate of the variance and covariance between the member Series. However, for many applications this estimate may not be acceptable because the estimate covariance matrix is not guaranteed to be positive semi-definite. This could lead to estimate correlations having absolute values which are greater than one, and/or a non-invertible covariance matrix. See `Estimation of covariance matrices <http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_ matrices>`__ for more details. Examples -------- >>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)], ... columns=['dogs', 'cats']) >>> df.cov() dogs cats dogs 0.666667 -1.000000 cats -1.000000 1.666667 >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(1000, 5), ... 
columns=['a', 'b', 'c', 'd', 'e']) >>> df.cov() a b c d e a 0.998438 -0.020161 0.059277 -0.008943 0.014144 b -0.020161 1.059352 -0.008543 -0.024738 0.009826 c 0.059277 -0.008543 1.010670 -0.001486 -0.000271 d -0.008943 -0.024738 -0.001486 0.921297 -0.013692 e 0.014144 0.009826 -0.000271 -0.013692 0.977795 **Minimum number of periods** This method also supports an optional ``min_periods`` keyword that specifies the required minimum number of non-NA observations for each column pair in order to have a valid result: >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(20, 3), ... columns=['a', 'b', 'c']) >>> df.loc[df.index[:5], 'a'] = np.nan >>> df.loc[df.index[5:10], 'b'] = np.nan >>> df.cov(min_periods=12) a b c a 0.316741 NaN -0.150812 b NaN 1.248003 0.191417 c -0.150812 0.191417 0.895202 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if notna(mat).all(): if min_periods is not None and min_periods > len(mat): baseCov = np.empty((mat.shape[1], mat.shape[1])) baseCov.fill(np.nan) else: baseCov = np.cov(mat.T) baseCov = baseCov.reshape((len(cols), len(cols))) else: baseCov = libalgos.nancorr(ensure_float64(mat), cov=True, minp=min_periods) return self._constructor(baseCov, index=idx, columns=cols) def corrwith(self, other, axis=0, drop=False, method='pearson'): """ Compute pairwise correlation between rows or columns of DataFrame with rows or columns of Series or DataFrame. DataFrames are first aligned along both axes before computing the correlations. Parameters ---------- other : DataFrame, Series Object with which to compute correlations. axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' to compute column-wise, 1 or 'columns' for row-wise. drop : bool, default False Drop missing indices from result. method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float .. versionadded:: 0.24.0 Returns ------- Series Pairwise correlations. See Also -------- DataFrame.corr """ axis = self._get_axis_number(axis) this = self._get_numeric_data() if isinstance(other, Series): return this.apply(lambda x: other.corr(x, method=method), axis=axis) other = other._get_numeric_data() left, right = this.align(other, join='inner', copy=False) if axis == 1: left = left.T right = right.T if method == 'pearson': # mask missing values left = left + right * 0 right = right + left * 0 # demeaned data ldem = left - left.mean() rdem = right - right.mean() num = (ldem * rdem).sum() dom = (left.count() - 1) * left.std() * right.std() correl = num / dom elif method in ['kendall', 'spearman'] or callable(method): def c(x): return nanops.nancorr(x[0], x[1], method=method) correl = Series(map(c, zip(left.values.T, right.values.T)), index=left.columns) else: raise ValueError("Invalid method {method} was passed, " "valid methods are: 'pearson', 'kendall', " "'spearman', or callable". format(method=method)) if not drop: # Find non-matching labels along the given axis # and append missing correlations (GH 22375) raxis = 1 if axis == 0 else 0 result_index = (this._get_axis(raxis). 
union(other._get_axis(raxis))) idx_diff = result_index.difference(correl.index) if len(idx_diff) > 0: correl = correl.append(Series([np.nan] * len(idx_diff), index=idx_diff)) return correl # ---------------------------------------------------------------------- # ndarray-like stats methods def count(self, axis=0, level=None, numeric_only=False): """ Count non-NA cells for each column or row. The values `None`, `NaN`, `NaT`, and optionally `numpy.inf` (depending on `pandas.options.mode.use_inf_as_na`) are considered NA. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index' counts are generated for each column. If 1 or 'columns' counts are generated for each **row**. level : int or str, optional If the axis is a `MultiIndex` (hierarchical), count along a particular `level`, collapsing into a `DataFrame`. A `str` specifies the level name. numeric_only : bool, default False Include only `float`, `int` or `boolean` data. Returns ------- Series or DataFrame For each column/row the number of non-NA/null entries. If `level` is specified returns a `DataFrame`. See Also -------- Series.count: Number of non-NA elements in a Series. DataFrame.shape: Number of DataFrame rows and columns (including NA elements). DataFrame.isna: Boolean same-sized DataFrame showing places of NA elements. Examples -------- Constructing DataFrame from a dictionary: >>> df = pd.DataFrame({"Person": ... ["John", "Myla", "Lewis", "John", "Myla"], ... "Age": [24., np.nan, 21., 33, 26], ... "Single": [False, True, True, True, False]}) >>> df Person Age Single 0 John 24.0 False 1 Myla NaN True 2 Lewis 21.0 True 3 John 33.0 True 4 Myla 26.0 False Notice the uncounted NA values: >>> df.count() Person 5 Age 4 Single 5 dtype: int64 Counts for each **row**: >>> df.count(axis='columns') 0 3 1 2 2 3 3 3 4 3 dtype: int64 Counts for one level of a `MultiIndex`: >>> df.set_index(["Person", "Single"]).count(level="Person") Age Person John 2 Lewis 1 Myla 1 """ axis = self._get_axis_number(axis) if level is not None: return self._count_level(level, axis=axis, numeric_only=numeric_only) if numeric_only: frame = self._get_numeric_data() else: frame = self # GH #423 if len(frame._get_axis(axis)) == 0: result = Series(0, index=frame._get_agg_axis(axis)) else: if frame._is_mixed_type or frame._data.any_extension_types: # the or any_extension_types is really only hit for single- # column frames with an extension array result = notna(frame).sum(axis=axis) else: # GH13407 series_counts = notna(frame).sum(axis=axis) counts = series_counts.values result = Series(counts, index=frame._get_agg_axis(axis)) return result.astype('int64') def _count_level(self, level, axis=0, numeric_only=False): if numeric_only: frame = self._get_numeric_data() else: frame = self count_axis = frame._get_axis(axis) agg_axis = frame._get_agg_axis(axis) if not isinstance(count_axis, MultiIndex): raise TypeError("Can only count levels on hierarchical " "{ax}.".format(ax=self._get_axis_name(axis))) if frame._is_mixed_type: # Since we have mixed types, calling notna(frame.values) might # upcast everything to object mask = notna(frame).values else: # But use the speedup when we have homogeneous dtypes mask = notna(frame.values) if axis == 1: # We're transposing the mask rather than frame to avoid potential # upcasts to object, which induces a ~20x slowdown mask = mask.T if isinstance(level, str): level = count_axis._get_level_number(level) level_index = count_axis.levels[level] level_codes = ensure_int64(count_axis.codes[level]) counts = 
lib.count_level_2d(mask, level_codes, len(level_index), axis=0)

        result = DataFrame(counts, index=level_index, columns=agg_axis)

        if axis == 1:
            # Undo our earlier transpose
            return result.T
        else:
            return result

    def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None,
                filter_type=None, **kwds):
        if axis is None and filter_type == 'bool':
            labels = None
            constructor = None
        else:
            # TODO: Make other agg func handle axis=None properly
            axis = self._get_axis_number(axis)
            labels = self._get_agg_axis(axis)
            constructor = self._constructor

        def f(x):
            return op(x, axis=axis, skipna=skipna, **kwds)

        # exclude timedelta/datetime unless we are uniform types
        if (axis == 1 and self._is_datelike_mixed_type
                and (not self._is_homogeneous_type
                     and not is_datetime64tz_dtype(self.dtypes[0]))):
            numeric_only = True

        if numeric_only is None:
            try:
                values = self.values
                result = f(values)

                if (filter_type == 'bool' and is_object_dtype(values)
                        and axis is None):
                    # work around https://github.com/numpy/numpy/issues/10489
                    # TODO: combine with hasattr(result, 'dtype') further down
                    # hard since we don't have `values` down there.
                    result = np.bool_(result)
            except Exception as e:
                # try by-column first
                if filter_type is None and axis == 0:
                    try:
                        # this can end up with a non-reduction
                        # but not always. if the types are mixed
                        # with datelike then need to make sure a series

                        # we only end up here if we have not specified
                        # numeric_only and yet we have tried a
                        # column-by-column reduction, where we have mixed type.
                        # So let's just do what we can
                        from pandas.core.apply import frame_apply
                        opa = frame_apply(self,
                                          func=f,
                                          result_type='expand',
                                          ignore_failures=True)
                        result = opa.get_result()
                        if result.ndim == self.ndim:
                            result = result.iloc[0]
                        return result
                    except Exception:
                        pass

                if filter_type is None or filter_type == 'numeric':
                    data = self._get_numeric_data()
                elif filter_type == 'bool':
                    data = self._get_bool_data()
                else:  # pragma: no cover
                    e = NotImplementedError(
                        "Handling exception with filter_type {f} not "
                        "implemented.".format(f=filter_type))
                    raise_with_traceback(e)
                with np.errstate(all='ignore'):
                    result = f(data.values)
                labels = data._get_agg_axis(axis)
        else:
            if numeric_only:
                if filter_type is None or filter_type == 'numeric':
                    data = self._get_numeric_data()
                elif filter_type == 'bool':
                    # GH 25101,
                    # GH 24434
                    data = self._get_bool_data() if axis == 0 else self
                else:  # pragma: no cover
                    msg = ("Generating numeric_only data with filter_type "
                           "{f} not supported.".format(f=filter_type))
                    raise NotImplementedError(msg)
                values = data.values
                labels = data._get_agg_axis(axis)
            else:
                values = self.values
            result = f(values)

        if hasattr(result, 'dtype') and is_object_dtype(result.dtype):
            try:
                if filter_type is None or filter_type == 'numeric':
                    result = result.astype(np.float64)
                elif filter_type == 'bool' and notna(result).all():
                    result = result.astype(np.bool_)
            except (ValueError, TypeError):
                # try to coerce to the original dtypes item by item if we can
                if axis == 0:
                    result = coerce_to_dtypes(result, self.dtypes)

        if constructor is not None:
            result = Series(result, index=labels)
        return result

    def nunique(self, axis=0, dropna=True):
        """
        Count distinct observations over requested axis.

        Return Series with number of distinct observations. Can ignore NaN
        values.

        .. versionadded:: 0.20.0

        Parameters
        ----------
        axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for
            column-wise.
        dropna : bool, default True
            Don't include NaN in the counts.
Returns ------- Series See Also -------- Series.nunique: Method nunique for Series. DataFrame.count: Count non-NA cells for each column or row. Examples -------- >>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [1, 1, 1]}) >>> df.nunique() A 3 B 1 dtype: int64 >>> df.nunique(axis=1) 0 1 1 2 2 2 dtype: int64 """ return self.apply(Series.nunique, axis=axis, dropna=dropna) def idxmin(self, axis=0, skipna=True): """ Return index of first occurrence of minimum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of minima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmin Notes ----- This method is the DataFrame version of ``ndarray.argmin``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmin(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def idxmax(self, axis=0, skipna=True): """ Return index of first occurrence of maximum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of maxima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmax Notes ----- This method is the DataFrame version of ``ndarray.argmax``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmax(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def _get_agg_axis(self, axis_num): """ Let's be explicit about this. """ if axis_num == 0: return self.columns elif axis_num == 1: return self.index else: raise ValueError('Axis must be 0 or 1 (got %r)' % axis_num) def mode(self, axis=0, numeric_only=False, dropna=True): """ Get the mode(s) of each element along the selected axis. The mode of a set of values is the value that appears most often. It can be multiple values. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to iterate over while searching for the mode: * 0 or 'index' : get mode of each column * 1 or 'columns' : get mode of each row numeric_only : bool, default False If True, only apply to numeric columns. dropna : bool, default True Don't consider counts of NaN/NaT. .. versionadded:: 0.24.0 Returns ------- DataFrame The modes of each column or row. See Also -------- Series.mode : Return the highest frequency value in a Series. Series.value_counts : Return the counts of values in a Series. Examples -------- >>> df = pd.DataFrame([('bird', 2, 2), ... ('mammal', 4, np.nan), ... ('arthropod', 8, 0), ... ('bird', 2, np.nan)], ... index=('falcon', 'horse', 'spider', 'ostrich'), ... columns=('species', 'legs', 'wings')) >>> df species legs wings falcon bird 2 2.0 horse mammal 4 NaN spider arthropod 8 0.0 ostrich bird 2 NaN By default, missing values are not considered, and the mode of wings are both 0 and 2. 
The second row of species and legs contains ``NaN``, because they have only one mode, but the DataFrame has two rows. >>> df.mode() species legs wings 0 bird 2.0 0.0 1 NaN NaN 2.0 Setting ``dropna=False`` ``NaN`` values are considered and they can be the mode (like for wings). >>> df.mode(dropna=False) species legs wings 0 bird 2 NaN Setting ``numeric_only=True``, only the mode of numeric columns is computed, and columns of other types are ignored. >>> df.mode(numeric_only=True) legs wings 0 2.0 0.0 1 NaN 2.0 To compute the mode over columns and not rows, use the axis parameter: >>> df.mode(axis='columns', numeric_only=True) 0 1 falcon 2.0 NaN horse 4.0 NaN spider 0.0 8.0 ostrich 2.0 NaN """ data = self if not numeric_only else self._get_numeric_data() def f(s): return s.mode(dropna=dropna) return data.apply(f, axis=axis) def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation='linear'): """ Return values at the given quantile over requested axis. Parameters ---------- q : float or array-like, default 0.5 (50% quantile) Value between 0 <= q <= 1, the quantile(s) to compute. axis : {0, 1, 'index', 'columns'} (default 0) Equals 0 or 'index' for row-wise, 1 or 'columns' for column-wise. numeric_only : bool, default True If False, the quantile of datetime and timedelta data will be computed as well. interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'} This optional parameter specifies the interpolation method to use, when the desired quantile lies between two data points `i` and `j`: * linear: `i + (j - i) * fraction`, where `fraction` is the fractional part of the index surrounded by `i` and `j`. * lower: `i`. * higher: `j`. * nearest: `i` or `j` whichever is nearest. * midpoint: (`i` + `j`) / 2. .. versionadded:: 0.18.0 Returns ------- Series or DataFrame If ``q`` is an array, a DataFrame will be returned where the index is ``q``, the columns are the columns of self, and the values are the quantiles. If ``q`` is a float, a Series will be returned where the index is the columns of self and the values are the quantiles. See Also -------- core.window.Rolling.quantile: Rolling quantile. numpy.percentile: Numpy function to compute the percentile. Examples -------- >>> df = pd.DataFrame(np.array([[1, 1], [2, 10], [3, 100], [4, 100]]), ... columns=['a', 'b']) >>> df.quantile(.1) a 1.3 b 3.7 Name: 0.1, dtype: float64 >>> df.quantile([.1, .5]) a b 0.1 1.3 3.7 0.5 2.5 55.0 Specifying `numeric_only=False` will also compute the quantile of datetime and timedelta data. >>> df = pd.DataFrame({'A': [1, 2], ... 'B': [pd.Timestamp('2010'), ... pd.Timestamp('2011')], ... 'C': [pd.Timedelta('1 days'), ... pd.Timedelta('2 days')]}) >>> df.quantile(0.5, numeric_only=False) A 1.5 B 2010-07-02 12:00:00 C 1 days 12:00:00 Name: 0.5, dtype: object """ self._check_percentile(q) data = self._get_numeric_data() if numeric_only else self axis = self._get_axis_number(axis) is_transposed = axis == 1 if is_transposed: data = data.T result = data._data.quantile(qs=q, axis=1, interpolation=interpolation, transposed=is_transposed) if result.ndim == 2: result = self._constructor(result) else: result = self._constructor_sliced(result, name=q) if is_transposed: result = result.T return result def to_timestamp(self, freq=None, how='start', axis=0, copy=True): """ Cast to DatetimeIndex of timestamps, at *beginning* of period. Parameters ---------- freq : str, default frequency of PeriodIndex Desired frequency. 
how : {'s', 'e', 'start', 'end'} Convention for converting period to timestamp; start of period vs. end. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- DataFrame with DatetimeIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_timestamp(freq=freq, how=how)) elif axis == 1: new_data.set_axis(0, self.columns.to_timestamp(freq=freq, how=how)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def to_period(self, freq=None, axis=0, copy=True): """ Convert DataFrame from DatetimeIndex to PeriodIndex with desired frequency (inferred from index if not passed). Parameters ---------- freq : str, default Frequency of the PeriodIndex. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- TimeSeries with PeriodIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_period(freq=freq)) elif axis == 1: new_data.set_axis(0, self.columns.to_period(freq=freq)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def isin(self, values): """ Whether each element in the DataFrame is contained in values. Parameters ---------- values : iterable, Series, DataFrame or dict The result will only be true at a location if all the labels match. If `values` is a Series, that's the index. If `values` is a dict, the keys must be the column names, which must match. If `values` is a DataFrame, then both the index and column labels must match. Returns ------- DataFrame DataFrame of booleans showing whether each element in the DataFrame is contained in values. See Also -------- DataFrame.eq: Equality test for DataFrame. Series.isin: Equivalent method on Series. Series.str.contains: Test if pattern or regex is contained within a string of a Series or Index. Examples -------- >>> df = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]}, ... index=['falcon', 'dog']) >>> df num_legs num_wings falcon 2 2 dog 4 0 When ``values`` is a list check whether every value in the DataFrame is present in the list (which animals have 0 or 2 legs or wings) >>> df.isin([0, 2]) num_legs num_wings falcon True True dog False True When ``values`` is a dict, we can pass values to check for each column separately: >>> df.isin({'num_wings': [0, 3]}) num_legs num_wings falcon False False dog False True When ``values`` is a Series or DataFrame the index and column must match. Note that 'falcon' does not match based on the number of legs in df2. >>> other = pd.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]}, ... 
index=['spider', 'falcon']) >>> df.isin(other) num_legs num_wings falcon True True dog False False """ if isinstance(values, dict): from pandas.core.reshape.concat import concat values = collections.defaultdict(list, values) return concat((self.iloc[:, [i]].isin(values[col]) for i, col in enumerate(self.columns)), axis=1) elif isinstance(values, Series): if not values.index.is_unique: raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self), axis='index') elif isinstance(values, DataFrame): if not (values.columns.is_unique and values.index.is_unique): raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self)) else: if not is_list_like(values): raise TypeError("only list-like or dict-like objects are " "allowed to be passed to DataFrame.isin(), " "you passed a " "{0!r}".format(type(values).__name__)) return DataFrame( algorithms.isin(self.values.ravel(), values).reshape(self.shape), self.index, self.columns) # ---------------------------------------------------------------------- # Add plotting methods to DataFrame plot = CachedAccessor("plot", pandas.plotting.FramePlotMethods) hist = pandas.plotting.hist_frame boxplot = pandas.plotting.boxplot_frame sparse = CachedAccessor("sparse", SparseFrameAccessor) DataFrame._setup_axes(['index', 'columns'], info_axis=1, stat_axis=0, axes_are_reversed=True, aliases={'rows': 0}, docs={ 'index': 'The index (row labels) of the DataFrame.', 'columns': 'The column labels of the DataFrame.'}) DataFrame._add_numeric_operations() DataFrame._add_series_or_dataframe_operations() ops.add_flex_arithmetic_methods(DataFrame) ops.add_special_arithmetic_methods(DataFrame) def _from_nested_dict(data): # TODO: this should be seriously cythonized new_data = OrderedDict() for index, s in data.items(): for col, v in s.items(): new_data[col] = new_data.get(col, OrderedDict()) new_data[col][index] = v return new_data def _put_str(s, space): return '{s}'.format(s=s)[:space].ljust(space)
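# ----------------------------------------------------------------------
# Illustrative usage (not part of frame.py): a minimal, hedged sketch
# exercising the methods defined above. The frame and candidate values
# are invented for demonstration and mirror the docstring examples.

import pandas as pd

demo = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]},
                    index=['falcon', 'dog'])

# isin with a dict checks each named column against its own candidates
mask = demo.isin({'num_wings': [0, 3]})

# quantile with a list of q values returns a DataFrame indexed by q
quartiles = demo.quantile([0.25, 0.5, 0.75])

print(mask)
print(quartiles)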
import json import numpy as np import pytest from pandas import DataFrame, Index import pandas.util.testing as tm from pandas.io.json import json_normalize from pandas.io.json.normalize import nested_to_record @pytest.fixture def deep_nested(): # deeply nested data return [{'country': 'USA', 'states': [{'name': 'California', 'cities': [{'name': 'San Francisco', 'pop': 12345}, {'name': 'Los Angeles', 'pop': 12346}] }, {'name': 'Ohio', 'cities': [{'name': 'Columbus', 'pop': 1234}, {'name': 'Cleveland', 'pop': 1236}]} ] }, {'country': 'Germany', 'states': [{'name': 'Bayern', 'cities': [{'name': 'Munich', 'pop': 12347}] }, {'name': 'Nordrhein-Westfalen', 'cities': [{'name': 'Duesseldorf', 'pop': 1238}, {'name': 'Koeln', 'pop': 1239}]} ] } ] @pytest.fixture def state_data(): return [ {'counties': [{'name': 'Dade', 'population': 12345}, {'name': 'Broward', 'population': 40000}, {'name': 'Palm Beach', 'population': 60000}], 'info': {'governor': 'Rick Scott'}, 'shortname': 'FL', 'state': 'Florida'}, {'counties': [{'name': 'Summit', 'population': 1234}, {'name': 'Cuyahoga', 'population': 1337}], 'info': {'governor': 'John Kasich'}, 'shortname': 'OH', 'state': 'Ohio'}] @pytest.fixture def author_missing_data(): return [ {'info': None}, {'info': {'created_at': '11/08/1993', 'last_updated': '26/05/2012'}, 'author_name': {'first': 'Jane', 'last_name': 'Doe'} }] @pytest.fixture def missing_metadata(): return [ {'name': 'Alice', 'addresses': [{'number': 9562, 'street': 'Morris St.', 'city': 'Massillon', 'state': 'OH', 'zip': 44646}] }, {'addresses': [{'number': 8449, 'street': 'Spring St.', 'city': 'Elizabethton', 'state': 'TN', 'zip': 37643}] } ] class TestJSONNormalize: def test_simple_records(self): recs = [{'a': 1, 'b': 2, 'c': 3}, {'a': 4, 'b': 5, 'c': 6}, {'a': 7, 'b': 8, 'c': 9}, {'a': 10, 'b': 11, 'c': 12}] result = json_normalize(recs) expected = DataFrame(recs) tm.assert_frame_equal(result, expected) def test_simple_normalize(self, state_data): result = json_normalize(state_data[0], 'counties') expected = DataFrame(state_data[0]['counties']) tm.assert_frame_equal(result, expected) result = json_normalize(state_data, 'counties') expected = [] for rec in state_data: expected.extend(rec['counties']) expected = DataFrame(expected) tm.assert_frame_equal(result, expected) result = json_normalize(state_data, 'counties', meta='state') expected['state'] = np.array(['Florida', 'Ohio']).repeat([3, 2]) tm.assert_frame_equal(result, expected) def test_empty_array(self): result = json_normalize([]) expected = DataFrame() tm.assert_frame_equal(result, expected) def test_simple_normalize_with_separator(self, deep_nested): # GH 14883 result = json_normalize({'A': {'A': 1, 'B': 2}}) expected = DataFrame([[1, 2]], columns=['A.A', 'A.B']) tm.assert_frame_equal(result.reindex_like(expected), expected) result = json_normalize({'A': {'A': 1, 'B': 2}}, sep='_') expected = DataFrame([[1, 2]], columns=['A_A', 'A_B']) tm.assert_frame_equal(result.reindex_like(expected), expected) result = json_normalize({'A': {'A': 1, 'B': 2}}, sep='\u03c3') expected = DataFrame([[1, 2]], columns=['A\u03c3A', 'A\u03c3B']) tm.assert_frame_equal(result.reindex_like(expected), expected) result = json_normalize(deep_nested, ['states', 'cities'], meta=['country', ['states', 'name']], sep='_') expected = Index(['name', 'pop', 'country', 'states_name']).sort_values() assert result.columns.sort_values().equals(expected) def test_value_array_record_prefix(self): # GH 21536 result = json_normalize({'A': [1, 2]}, 'A', record_prefix='Prefix.') 
expected = DataFrame([[1], [2]], columns=['Prefix.0']) tm.assert_frame_equal(result, expected) def test_nested_object_record_path(self): # GH 22706 data = {'state': 'Florida', 'info': { 'governor': 'Rick Scott', 'counties': [{'name': 'Dade', 'population': 12345}, {'name': 'Broward', 'population': 40000}, {'name': 'Palm Beach', 'population': 60000}]}} result = json_normalize(data, record_path=["info", "counties"]) expected = DataFrame([['Dade', 12345], ['Broward', 40000], ['Palm Beach', 60000]], columns=['name', 'population']) tm.assert_frame_equal(result, expected) def test_more_deeply_nested(self, deep_nested): result = json_normalize(deep_nested, ['states', 'cities'], meta=['country', ['states', 'name']]) # meta_prefix={'states': 'state_'}) ex_data = {'country': ['USA'] * 4 + ['Germany'] * 3, 'states.name': ['California', 'California', 'Ohio', 'Ohio', 'Bayern', 'Nordrhein-Westfalen', 'Nordrhein-Westfalen'], 'name': ['San Francisco', 'Los Angeles', 'Columbus', 'Cleveland', 'Munich', 'Duesseldorf', 'Koeln'], 'pop': [12345, 12346, 1234, 1236, 12347, 1238, 1239]} expected = DataFrame(ex_data, columns=result.columns) tm.assert_frame_equal(result, expected) def test_shallow_nested(self): data = [{'state': 'Florida', 'shortname': 'FL', 'info': { 'governor': 'Rick Scott' }, 'counties': [{'name': 'Dade', 'population': 12345}, {'name': 'Broward', 'population': 40000}, {'name': 'Palm Beach', 'population': 60000}]}, {'state': 'Ohio', 'shortname': 'OH', 'info': { 'governor': 'John Kasich' }, 'counties': [{'name': 'Summit', 'population': 1234}, {'name': 'Cuyahoga', 'population': 1337}]}] result = json_normalize(data, 'counties', ['state', 'shortname', ['info', 'governor']]) ex_data = {'name': ['Dade', 'Broward', 'Palm Beach', 'Summit', 'Cuyahoga'], 'state': ['Florida'] * 3 + ['Ohio'] * 2, 'shortname': ['FL', 'FL', 'FL', 'OH', 'OH'], 'info.governor': ['Rick Scott'] * 3 + ['John Kasich'] * 2, 'population': [12345, 40000, 60000, 1234, 1337]} expected = DataFrame(ex_data, columns=result.columns) tm.assert_frame_equal(result, expected) def test_meta_name_conflict(self): data = [{'foo': 'hello', 'bar': 'there', 'data': [{'foo': 'something', 'bar': 'else'}, {'foo': 'something2', 'bar': 'else2'}]}] msg = (r"Conflicting metadata name (foo|bar)," " need distinguishing prefix") with pytest.raises(ValueError, match=msg): json_normalize(data, 'data', meta=['foo', 'bar']) result = json_normalize(data, 'data', meta=['foo', 'bar'], meta_prefix='meta') for val in ['metafoo', 'metabar', 'foo', 'bar']: assert val in result def test_meta_parameter_not_modified(self): # GH 18610 data = [{'foo': 'hello', 'bar': 'there', 'data': [{'foo': 'something', 'bar': 'else'}, {'foo': 'something2', 'bar': 'else2'}]}] COLUMNS = ['foo', 'bar'] result = json_normalize(data, 'data', meta=COLUMNS, meta_prefix='meta') assert COLUMNS == ['foo', 'bar'] for val in ['metafoo', 'metabar', 'foo', 'bar']: assert val in result def test_record_prefix(self, state_data): result = json_normalize(state_data[0], 'counties') expected = DataFrame(state_data[0]['counties']) tm.assert_frame_equal(result, expected) result = json_normalize(state_data, 'counties', meta='state', record_prefix='county_') expected = [] for rec in state_data: expected.extend(rec['counties']) expected = DataFrame(expected) expected = expected.rename(columns=lambda x: 'county_' + x) expected['state'] = np.array(['Florida', 'Ohio']).repeat([3, 2]) tm.assert_frame_equal(result, expected) def test_non_ascii_key(self): testjson = ( b'[{"\xc3\x9cnic\xc3\xb8de":0,"sub":{"A":1, "B":2}},' + 
b'{"\xc3\x9cnic\xc3\xb8de":1,"sub":{"A":3, "B":4}}]'
        ).decode('utf8')

        testdata = {
            'sub.A': [1, 3],
            'sub.B': [2, 4],
            b"\xc3\x9cnic\xc3\xb8de".decode('utf8'): [0, 1]
        }
        expected = DataFrame(testdata)

        result = json_normalize(json.loads(testjson))
        tm.assert_frame_equal(result, expected)

    def test_missing_field(self, author_missing_data):
        # GH20030:
        result = json_normalize(author_missing_data)
        ex_data = [
            {'info': np.nan,
             'author_name.first': np.nan,
             'author_name.last_name': np.nan,
             'info.created_at': np.nan,
             'info.last_updated': np.nan},
            {'info': None,
             'author_name.first': 'Jane',
             'author_name.last_name': 'Doe',
             'info.created_at': '11/08/1993',
             'info.last_updated': '26/05/2012'}
        ]
        expected = DataFrame(ex_data)
        tm.assert_frame_equal(result, expected)


class TestNestedToRecord:

    def test_flat_stays_flat(self):
        recs = [dict(flat1=1, flat2=2),
                dict(flat1=3, flat2=4),
                ]
        result = nested_to_record(recs)
        expected = recs
        assert result == expected

    def test_one_level_deep_flattens(self):
        data = dict(flat1=1,
                    dict1=dict(c=1, d=2))
        result = nested_to_record(data)
        expected = {'dict1.c': 1,
                    'dict1.d': 2,
                    'flat1': 1}
        assert result == expected

    def test_nested_flattens(self):
        data = dict(flat1=1,
                    dict1=dict(c=1, d=2),
                    nested=dict(e=dict(c=1, d=2),
                                d=2))
        result = nested_to_record(data)
        expected = {'dict1.c': 1,
                    'dict1.d': 2,
                    'flat1': 1,
                    'nested.d': 2,
                    'nested.e.c': 1,
                    'nested.e.d': 2}
        assert result == expected

    def test_json_normalize_errors(self, missing_metadata):
        # GH14583:
        # If meta keys are not always present a new option to set
        # errors='ignore' has been implemented

        msg = ("Try running with errors='ignore' as key 'name'"
               " is not always present")
        with pytest.raises(KeyError, match=msg):
            json_normalize(
                data=missing_metadata,
                record_path='addresses',
                meta='name',
                errors='raise')

    def test_missing_meta(self, missing_metadata):
        # GH25468
        # If metadata is nullable with errors set to ignore, the null values
        # should be numpy.nan values
        result = json_normalize(
            data=missing_metadata,
            record_path='addresses',
            meta='name',
            errors='ignore')
        ex_data = [
            ['Massillon', 9562, 'OH', 'Morris St.', 44646, 'Alice'],
            ['Elizabethton', 8449, 'TN', 'Spring St.', 37643, np.nan]
        ]
        columns = ['city', 'number', 'state', 'street', 'zip', 'name']
        expected = DataFrame(ex_data, columns=columns)
        tm.assert_frame_equal(result, expected)

    def test_donot_drop_nonevalues(self):
        # GH21356
        data = [
            {'info': None,
             'author_name':
             {'first': 'Smith', 'last_name': 'Appleseed'}
             },
            {'info':
                {'created_at': '11/08/1993', 'last_updated': '26/05/2012'},
             'author_name':
             {'first': 'Jane', 'last_name': 'Doe'}
             }
        ]
        result = nested_to_record(data)
        expected = [
            {'info': None,
             'author_name.first': 'Smith',
             'author_name.last_name': 'Appleseed'},
            {'author_name.first': 'Jane',
             'author_name.last_name': 'Doe',
             'info.created_at': '11/08/1993',
             'info.last_updated': '26/05/2012'}]

        assert result == expected

    def test_nonetype_top_level_bottom_level(self):
        # GH21158: If inner level json has a key with a null value
        # make sure it does not do a new_d.pop twice and raise
        data = {
            "id": None,
            "location": {
                "country": {
                    "state": {
                        "id": None,
                        "town.info": {
                            "id": None,
                            "region": None,
                            "x": 49.151580810546875,
                            "y": -33.148521423339844,
                            "z": 27.572303771972656}}}
            }
        }
        result = nested_to_record(data)
        expected = {
            'id': None,
            'location.country.state.id': None,
            'location.country.state.town.info.id': None,
            'location.country.state.town.info.region': None,
            'location.country.state.town.info.x': 49.151580810546875,
            'location.country.state.town.info.y': -33.148521423339844,
            'location.country.state.town.info.z': 27.572303771972656}
        assert result == expected

    def test_nonetype_multiple_levels(self):
        # GH21158: If inner level json has a key with a null value
        # make sure it does not do a new_d.pop twice and raise
        data = {
            "id": None,
            "location": {
                "id": None,
                "country": {
                    "id": None,
                    "state": {
                        "id": None,
                        "town.info": {
                            "region": None,
                            "x": 49.151580810546875,
                            "y": -33.148521423339844,
                            "z": 27.572303771972656}}}
            }
        }
        result = nested_to_record(data)
        expected = {
            'id': None,
            'location.id': None,
            'location.country.id': None,
            'location.country.state.id': None,
            'location.country.state.town.info.region': None,
            'location.country.state.town.info.x': 49.151580810546875,
            'location.country.state.town.info.y': -33.148521423339844,
            'location.country.state.town.info.z': 27.572303771972656}
        assert result == expected
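# ----------------------------------------------------------------------
# Illustrative usage (not part of the test module): a minimal sketch of
# the json_normalize behavior exercised above, with invented data that
# mirrors the state/counties fixtures.

from pandas.io.json import json_normalize

data = [{'state': 'Florida',
         'info': {'governor': 'Rick Scott'},
         'counties': [{'name': 'Dade', 'population': 12345},
                      {'name': 'Broward', 'population': 40000}]}]

# record_path flattens the nested list of records; meta pulls scalar
# (and nested, via the list form) values down from the enclosing object
flat = json_normalize(data, record_path='counties',
                      meta=['state', ['info', 'governor']])
print(flat)  # columns: name, population, state, info.governor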
cbertinato/pandas
pandas/tests/io/json/test_normalize.py
pandas/core/frame.py
""" manage PyTables query interface via Expressions """ import ast from functools import partial import numpy as np from pandas._libs.tslibs import Timedelta, Timestamp from pandas.compat.chainmap import DeepChainMap from pandas.core.dtypes.common import is_list_like import pandas as pd from pandas.core.base import StringMixin import pandas.core.common as com from pandas.core.computation import expr, ops from pandas.core.computation.common import _ensure_decoded from pandas.core.computation.expr import BaseExprVisitor from pandas.core.computation.ops import UndefinedVariableError, is_term from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded class Scope(expr.Scope): __slots__ = 'queryables', def __init__(self, level, global_dict=None, local_dict=None, queryables=None): super().__init__(level + 1, global_dict=global_dict, local_dict=local_dict) self.queryables = queryables or dict() class Term(ops.Term): def __new__(cls, name, env, side=None, encoding=None): klass = Constant if not isinstance(name, str) else cls supr_new = StringMixin.__new__ return supr_new(klass) def __init__(self, name, env, side=None, encoding=None): super().__init__(name, env, side=side, encoding=encoding) def _resolve_name(self): # must be a queryables if self.side == 'left': if self.name not in self.env.queryables: raise NameError('name {name!r} is not defined' .format(name=self.name)) return self.name # resolve the rhs (and allow it to be None) try: return self.env.resolve(self.name, is_local=False) except UndefinedVariableError: return self.name # read-only property overwriting read/write property @property # type: ignore def value(self): return self._value class Constant(Term): def __init__(self, value, env, side=None, encoding=None): super().__init__(value, env, side=side, encoding=encoding) def _resolve_name(self): return self._name class BinOp(ops.BinOp): _max_selectors = 31 def __init__(self, op, lhs, rhs, queryables, encoding): super().__init__(op, lhs, rhs) self.queryables = queryables self.encoding = encoding self.filter = None self.condition = None def _disallow_scalar_only_bool_ops(self): pass def prune(self, klass): def pr(left, right): """ create and return a new specialized BinOp from myself """ if left is None: return right elif right is None: return left k = klass if isinstance(left, ConditionBinOp): if (isinstance(left, ConditionBinOp) and isinstance(right, ConditionBinOp)): k = JointConditionBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right elif isinstance(left, FilterBinOp): if (isinstance(left, FilterBinOp) and isinstance(right, FilterBinOp)): k = JointFilterBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right return k(self.op, left, right, queryables=self.queryables, encoding=self.encoding).evaluate() left, right = self.lhs, self.rhs if is_term(left) and is_term(right): res = pr(left.value, right.value) elif not is_term(left) and is_term(right): res = pr(left.prune(klass), right.value) elif is_term(left) and not is_term(right): res = pr(left.value, right.prune(klass)) elif not (is_term(left) or is_term(right)): res = pr(left.prune(klass), right.prune(klass)) return res def conform(self, rhs): """ inplace conform rhs """ if not is_list_like(rhs): rhs = [rhs] if isinstance(rhs, np.ndarray): rhs = rhs.ravel() return rhs @property def is_valid(self): """ return True if this is a valid field """ return self.lhs in self.queryables @property def is_in_table(self): """ return True if this is a valid column name for 
generation (e.g. an actual column in the table) """ return self.queryables.get(self.lhs) is not None @property def kind(self): """ the kind of my field """ return getattr(self.queryables.get(self.lhs), 'kind', None) @property def meta(self): """ the meta of my field """ return getattr(self.queryables.get(self.lhs), 'meta', None) @property def metadata(self): """ the metadata of my field """ return getattr(self.queryables.get(self.lhs), 'metadata', None) def generate(self, v): """ create and return the op string for this TermValue """ val = v.tostring(self.encoding) return "({lhs} {op} {val})".format(lhs=self.lhs, op=self.op, val=val) def convert_value(self, v): """ convert the expression that is in the term to something that is accepted by pytables """ def stringify(value): if self.encoding is not None: encoder = partial(pprint_thing_encoded, encoding=self.encoding) else: encoder = pprint_thing return encoder(value) kind = _ensure_decoded(self.kind) meta = _ensure_decoded(self.meta) if kind == 'datetime64' or kind == 'datetime': if isinstance(v, (int, float)): v = stringify(v) v = _ensure_decoded(v) v = Timestamp(v) if v.tz is not None: v = v.tz_convert('UTC') return TermValue(v, v.value, kind) elif kind == 'timedelta64' or kind == 'timedelta': v = Timedelta(v, unit='s').value return TermValue(int(v), v, kind) elif meta == 'category': metadata = com.values_from_object(self.metadata) result = metadata.searchsorted(v, side='left') # result returns 0 if v is first element or if v is not in metadata # check that metadata contains v if not result and v not in metadata: result = -1 return TermValue(result, result, 'integer') elif kind == 'integer': v = int(float(v)) return TermValue(v, v, kind) elif kind == 'float': v = float(v) return TermValue(v, v, kind) elif kind == 'bool': if isinstance(v, str): v = not v.strip().lower() in ['false', 'f', 'no', 'n', 'none', '0', '[]', '{}', ''] else: v = bool(v) return TermValue(v, v, kind) elif isinstance(v, str): # string quoting return TermValue(v, stringify(v), 'string') else: raise TypeError("Cannot compare {v} of type {typ} to {kind} column" .format(v=v, typ=type(v), kind=kind)) def convert_values(self): pass class FilterBinOp(BinOp): def __str__(self): return pprint_thing("[Filter : [{lhs}] -> [{op}]" .format(lhs=self.filter[0], op=self.filter[1])) def invert(self): """ invert the filter """ if self.filter is not None: f = list(self.filter) f[1] = self.generate_filter_op(invert=True) self.filter = tuple(f) return self def format(self): """ return the actual filter format """ return [self.filter] def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) rhs = self.conform(self.rhs) values = [TermValue(v, v, self.kind).value for v in rhs] if self.is_in_table: # if too many values to create the expression, use a filter instead if self.op in ['==', '!='] and len(values) > self._max_selectors: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) return self return None # equality conditions if self.op in ['==', '!=']: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) else: raise TypeError("passing a filterable condition to a non-table " "indexer [{slf}]".format(slf=self)) return self def generate_filter_op(self, invert=False): if (self.op == '!=' and not invert) or (self.op == '==' and invert): return lambda axis, vals: ~axis.isin(vals) else: return lambda axis, vals: axis.isin(vals) class JointFilterBinOp(FilterBinOp): 
def format(self): raise NotImplementedError("unable to collapse Joint Filters") def evaluate(self): return self class ConditionBinOp(BinOp): def __str__(self): return pprint_thing("[Condition : [{cond}]]" .format(cond=self.condition)) def invert(self): """ invert the condition """ # if self.condition is not None: # self.condition = "~(%s)" % self.condition # return self raise NotImplementedError("cannot use an invert condition when " "passing to numexpr") def format(self): """ return the actual ne format """ return self.condition def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) # convert values if we are in the table if not self.is_in_table: return None rhs = self.conform(self.rhs) values = [self.convert_value(v) for v in rhs] # equality conditions if self.op in ['==', '!=']: # too many values to create the expression? if len(values) <= self._max_selectors: vs = [self.generate(v) for v in values] self.condition = "({cond})".format(cond=' | '.join(vs)) # use a filter after reading else: return None else: self.condition = self.generate(values[0]) return self class JointConditionBinOp(ConditionBinOp): def evaluate(self): self.condition = "({lhs} {op} {rhs})".format(lhs=self.lhs.condition, op=self.op, rhs=self.rhs.condition) return self class UnaryOp(ops.UnaryOp): def prune(self, klass): if self.op != '~': raise NotImplementedError("UnaryOp only support invert type ops") operand = self.operand operand = operand.prune(klass) if operand is not None: if issubclass(klass, ConditionBinOp): if operand.condition is not None: return operand.invert() elif issubclass(klass, FilterBinOp): if operand.filter is not None: return operand.invert() return None _op_classes = {'unary': UnaryOp} class ExprVisitor(BaseExprVisitor): const_type = Constant term_type = Term def __init__(self, env, engine, parser, **kwargs): super().__init__(env, engine, parser) for bin_op in self.binary_ops: bin_node = self.binary_op_nodes_map[bin_op] setattr(self, 'visit_{node}'.format(node=bin_node), lambda node, bin_op=bin_op: partial(BinOp, bin_op, **kwargs)) def visit_UnaryOp(self, node, **kwargs): if isinstance(node.op, (ast.Not, ast.Invert)): return UnaryOp('~', self.visit(node.operand)) elif isinstance(node.op, ast.USub): return self.const_type(-self.visit(node.operand).value, self.env) elif isinstance(node.op, ast.UAdd): raise NotImplementedError('Unary addition not supported') def visit_Index(self, node, **kwargs): return self.visit(node.value).value def visit_Assign(self, node, **kwargs): cmpr = ast.Compare(ops=[ast.Eq()], left=node.targets[0], comparators=[node.value]) return self.visit(cmpr) def visit_Subscript(self, node, **kwargs): # only allow simple subscripts value = self.visit(node.value) slobj = self.visit(node.slice) try: value = value.value except AttributeError: pass try: return self.const_type(value[slobj], self.env) except TypeError: raise ValueError("cannot subscript {value!r} with " "{slobj!r}".format(value=value, slobj=slobj)) def visit_Attribute(self, node, **kwargs): attr = node.attr value = node.value ctx = node.ctx.__class__ if ctx == ast.Load: # resolve the value resolved = self.visit(value) # try to get the value to see if we are another expression try: resolved = resolved.value except (AttributeError): pass try: return self.term_type(getattr(resolved, attr), self.env) except AttributeError: # something like datetime.datetime where scope is overridden if isinstance(value, ast.Name) and value.id == attr: return resolved raise 
ValueError("Invalid Attribute context {name}" .format(name=ctx.__name__)) def translate_In(self, op): return ast.Eq() if isinstance(op, ast.In) else op def _rewrite_membership_op(self, node, left, right): return self.visit(node.op), node.op, left, right def _validate_where(w): """ Validate that the where statement is of the right type. The type may either be String, Expr, or list-like of Exprs. Parameters ---------- w : String term expression, Expr, or list-like of Exprs. Returns ------- where : The original where clause if the check was successful. Raises ------ TypeError : An invalid data type was passed in for w (e.g. dict). """ if not (isinstance(w, (Expr, str)) or is_list_like(w)): raise TypeError("where must be passed as a string, Expr, " "or list-like of Exprs") return w class Expr(expr.Expr): """ hold a pytables like expression, comprised of possibly multiple 'terms' Parameters ---------- where : string term expression, Expr, or list-like of Exprs queryables : a "kinds" map (dict of column name -> kind), or None if column is non-indexable encoding : an encoding that will encode the query terms Returns ------- an Expr object Examples -------- 'index>=date' "columns=['A', 'D']" 'columns=A' 'columns==A' "~(columns=['A','B'])" 'index>df.index[3] & string="bar"' '(index>df.index[3] & index<=df.index[6]) | string="bar"' "ts>=Timestamp('2012-02-01')" "major_axis>=20130101" """ def __init__(self, where, queryables=None, encoding=None, scope_level=0): where = _validate_where(where) self.encoding = encoding self.condition = None self.filter = None self.terms = None self._visitor = None # capture the environment if needed local_dict = DeepChainMap() if isinstance(where, Expr): local_dict = where.env.scope where = where.expr elif isinstance(where, (list, tuple)): for idx, w in enumerate(where): if isinstance(w, Expr): local_dict = w.env.scope else: w = _validate_where(w) where[idx] = w where = ' & '.join(map('({})'.format, com.flatten(where))) # noqa self.expr = where self.env = Scope(scope_level + 1, local_dict=local_dict) if queryables is not None and isinstance(self.expr, str): self.env.queryables.update(queryables) self._visitor = ExprVisitor(self.env, queryables=queryables, parser='pytables', engine='pytables', encoding=encoding) self.terms = self.parse() def __str__(self): if self.terms is not None: return pprint_thing(self.terms) return pprint_thing(self.expr) def evaluate(self): """ create and return the numexpr condition and filter """ try: self.condition = self.terms.prune(ConditionBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid condition".format(expr=self.expr, slf=self)) try: self.filter = self.terms.prune(FilterBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid filter".format(expr=self.expr, slf=self)) return self.condition, self.filter class TermValue: """ hold a term value the we use to construct a condition/filter """ def __init__(self, value, converted, kind): self.value = value self.converted = converted self.kind = kind def tostring(self, encoding): """ quote the string if not encoded else encode and return """ if self.kind == 'string': if encoding is not None: return self.converted return '"{converted}"'.format(converted=self.converted) elif self.kind == 'float': # python 2 str(float) is not always # round-trippable so use repr() return repr(self.converted) return self.converted def maybe_expression(s): """ loose checking if s is a pytables-acceptable 
expression """ if not isinstance(s, str): return False ops = ExprVisitor.binary_ops + ExprVisitor.unary_ops + ('=',) # make sure we have an op at least return any(op in s for op in ops)
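# ----------------------------------------------------------------------
# Illustrative usage (not part of pytables.py): the usual entry point
# into the Expr/BinOp machinery above is HDFStore.select(), whose
# `where` string is parsed into conditions pushed down to PyTables.
# A hedged sketch; assumes the optional PyTables dependency is installed
# and writes a scratch file in the working directory.

import pandas as pd

df = pd.DataFrame({'A': range(10), 'B': list('abcdefghij')})
with pd.HDFStore('demo.h5', mode='w') as store:
    # format='table' with data_columns makes the columns queryable
    store.put('df', df, format='table', data_columns=True)
    # 'A > 5' becomes a ConditionBinOp evaluated inside PyTables; ==/!=
    # terms with more than _max_selectors values fall back to a filter
    subset = store.select('df', where='A > 5')
print(subset)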
cbertinato/pandas
pandas/tests/io/json/test_normalize.py
pandas/core/computation/pytables.py
import numpy as np
import pytest

import pandas as pd
from pandas import Index, MultiIndex


@pytest.fixture
def idx():
    # a MultiIndex used to test the general functionality of this object
    major_axis = Index(['foo', 'bar', 'baz', 'qux'])
    minor_axis = Index(['one', 'two'])

    major_codes = np.array([0, 0, 1, 2, 3, 3])
    minor_codes = np.array([0, 1, 0, 1, 0, 1])
    index_names = ['first', 'second']
    mi = MultiIndex(levels=[major_axis, minor_axis],
                    codes=[major_codes, minor_codes],
                    names=index_names, verify_integrity=False)
    return mi


@pytest.fixture
def idx_dup():
    # compare tests/indexes/multi/conftest.py
    major_axis = Index(['foo', 'bar', 'baz', 'qux'])
    minor_axis = Index(['one', 'two'])

    major_codes = np.array([0, 0, 1, 0, 1, 1])
    minor_codes = np.array([0, 1, 0, 1, 0, 1])
    index_names = ['first', 'second']
    mi = MultiIndex(levels=[major_axis, minor_axis],
                    codes=[major_codes, minor_codes],
                    names=index_names, verify_integrity=False)
    return mi


@pytest.fixture
def index_names():
    # names that match those in the idx fixture for testing equality of
    # names assigned to the idx
    return ['first', 'second']


@pytest.fixture
def holder():
    # the MultiIndex constructor used in pickle-compatibility tests
    return MultiIndex


@pytest.fixture
def compat_props():
    # a MultiIndex must have these properties associated with it
    return ['shape', 'ndim', 'size']


@pytest.fixture
def narrow_multi_index():
    """
    Return a MultiIndex that is narrower than the display (<80 characters).
    """
    n = 1000
    ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n))
    dti = pd.date_range('2000-01-01', freq='s', periods=n * 2)
    return pd.MultiIndex.from_arrays([ci, ci.codes + 9, dti],
                                     names=['a', 'b', 'dti'])


@pytest.fixture
def wide_multi_index():
    """
    Return a MultiIndex that is wider than the display (>80 characters).
    """
    n = 1000
    ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n))
    dti = pd.date_range('2000-01-01', freq='s', periods=n * 2)
    levels = [ci, ci.codes + 9, dti, dti, dti]
    names = ['a', 'b', 'dti_1', 'dti_2', 'dti_3']
    return pd.MultiIndex.from_arrays(levels, names=names)
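# ----------------------------------------------------------------------
# Illustrative usage (not part of the conftest): the idx fixture above
# can also be built from plain label arrays. A minimal sketch; note that
# from_arrays factorizes its inputs, so the internal levels/codes layout
# may differ from the fixture's even though the indexes compare equal
# element-wise.

import pandas as pd

arrays = [['foo', 'foo', 'bar', 'baz', 'qux', 'qux'],
          ['one', 'two', 'one', 'two', 'one', 'two']]
mi = pd.MultiIndex.from_arrays(arrays, names=['first', 'second'])
print(mi)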
cbertinato/pandas
pandas/tests/io/json/test_normalize.py
pandas/tests/indexes/multi/conftest.py
import numpy as np
import pytest

from pandas._libs.tslib import iNaT

from pandas.core.dtypes.dtypes import CategoricalDtype

import pandas as pd
from pandas import (
    CategoricalIndex, DatetimeIndex, Index, Int64Index, IntervalIndex,
    MultiIndex, PeriodIndex, RangeIndex, Series, TimedeltaIndex, UInt64Index,
    isna)
from pandas.core.indexes.base import InvalidIndexError
from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin
import pandas.util.testing as tm


class Base:
    """ base class for index sub-class tests """
    _holder = None
    _compat_props = ['shape', 'ndim', 'size', 'nbytes']

    def setup_indices(self):
        for name, idx in self.indices.items():
            setattr(self, name, idx)

    def test_pickle_compat_construction(self):
        # need an object to create with
        msg = (r"Index\(\.\.\.\) must be called with a collection of some"
               r" kind, None was passed|"
               r"__new__\(\) missing 1 required positional argument: 'data'|"
               r"__new__\(\) takes at least 2 arguments \(1 given\)")
        with pytest.raises(TypeError, match=msg):
            self._holder()

    def test_to_series(self):
        # assert that we are creating a copy of the index

        idx = self.create_index()
        s = idx.to_series()
        assert s.values is not idx.values
        assert s.index is not idx
        assert s.name == idx.name

    def test_to_series_with_arguments(self):
        # GH18699

        # index kwarg
        idx = self.create_index()
        s = idx.to_series(index=idx)

        assert s.values is not idx.values
        assert s.index is idx
        assert s.name == idx.name

        # name kwarg
        idx = self.create_index()
        s = idx.to_series(name='__test')

        assert s.values is not idx.values
        assert s.index is not idx
        assert s.name != idx.name

    @pytest.mark.parametrize("name", [None, "new_name"])
    def test_to_frame(self, name):
        # see GH-15230, GH-22580
        idx = self.create_index()

        if name:
            idx_name = name
        else:
            idx_name = idx.name or 0

        df = idx.to_frame(name=idx_name)

        assert df.index is idx
        assert len(df.columns) == 1
        assert df.columns[0] == idx_name
        assert df[idx_name].values is not idx.values

        df = idx.to_frame(index=False, name=idx_name)
        assert df.index is not idx

    def test_to_frame_datetime_tz(self):
        # GH 25809
        idx = pd.date_range(start='2019-01-01', end='2019-01-30', freq='D')
        idx = idx.tz_localize('UTC')
        result = idx.to_frame()
        expected = pd.DataFrame(idx, index=idx)
        tm.assert_frame_equal(result, expected)

    def test_shift(self):
        # GH8083 test the base class for shift
        idx = self.create_index()
        msg = "Not supported for type {}".format(type(idx).__name__)
        with pytest.raises(NotImplementedError, match=msg):
            idx.shift(1)
        with pytest.raises(NotImplementedError, match=msg):
            idx.shift(1, 2)

    def test_create_index_existing_name(self):
        # GH11193, when an existing index is passed, and a new name is not
        # specified, the new index should inherit the previous object name
        expected = self.create_index()
        if not isinstance(expected, MultiIndex):
            expected.name = 'foo'
            result = pd.Index(expected)
            tm.assert_index_equal(result, expected)

            result = pd.Index(expected, name='bar')
            expected.name = 'bar'
            tm.assert_index_equal(result, expected)
        else:
            expected.names = ['foo', 'bar']
            result = pd.Index(expected)
            tm.assert_index_equal(
                result, Index(Index([('foo', 'one'), ('foo', 'two'),
                                     ('bar', 'one'), ('baz', 'two'),
                                     ('qux', 'one'), ('qux', 'two')],
                                    dtype='object'),
                              names=['foo', 'bar']))

            result = pd.Index(expected, names=['A', 'B'])
            tm.assert_index_equal(
                result,
                Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'),
                             ('baz', 'two'), ('qux', 'one'), ('qux', 'two')],
                            dtype='object'), names=['A', 'B']))

    def test_numeric_compat(self):
        idx = self.create_index()
        with pytest.raises(TypeError, match="cannot perform __mul__"):
            idx * 1
        with pytest.raises(TypeError, match="cannot perform __rmul__"):
            1 * idx

        div_err = "cannot perform __truediv__"
        with pytest.raises(TypeError, match=div_err):
            idx / 1
        div_err = div_err.replace(' __', ' __r')
        with pytest.raises(TypeError, match=div_err):
            1 / idx
        with pytest.raises(TypeError, match="cannot perform __floordiv__"):
            idx // 1
        with pytest.raises(TypeError, match="cannot perform __rfloordiv__"):
            1 // idx

    def test_logical_compat(self):
        idx = self.create_index()
        with pytest.raises(TypeError, match='cannot perform all'):
            idx.all()
        with pytest.raises(TypeError, match='cannot perform any'):
            idx.any()

    def test_boolean_context_compat(self):
        # boolean context compat
        idx = self.create_index()

        with pytest.raises(ValueError, match='The truth value of a'):
            if idx:
                pass

    def test_reindex_base(self):
        idx = self.create_index()
        expected = np.arange(idx.size, dtype=np.intp)

        actual = idx.get_indexer(idx)
        tm.assert_numpy_array_equal(expected, actual)

        with pytest.raises(ValueError, match='Invalid fill method'):
            idx.get_indexer(idx, method='invalid')

    def test_get_indexer_consistency(self):
        # See GH 16819
        for name, index in self.indices.items():
            if isinstance(index, IntervalIndex):
                continue

            if index.is_unique or isinstance(index, CategoricalIndex):
                indexer = index.get_indexer(index[0:2])
                assert isinstance(indexer, np.ndarray)
                assert indexer.dtype == np.intp
            else:
                e = "Reindexing only valid with uniquely valued Index objects"
                with pytest.raises(InvalidIndexError, match=e):
                    index.get_indexer(index[0:2])

            indexer, _ = index.get_indexer_non_unique(index[0:2])
            assert isinstance(indexer, np.ndarray)
            assert indexer.dtype == np.intp

    def test_ndarray_compat_properties(self):
        idx = self.create_index()
        assert idx.T.equals(idx)
        assert idx.transpose().equals(idx)

        values = idx.values
        for prop in self._compat_props:
            assert getattr(idx, prop) == getattr(values, prop)

        # test for validity
        idx.nbytes
        idx.values.nbytes

    def test_repr_roundtrip(self):
        idx = self.create_index()
        tm.assert_index_equal(eval(repr(idx)), idx)

    def test_str(self):
        # test the string repr
        idx = self.create_index()
        idx.name = 'foo'
        assert "'foo'" in str(idx)
        assert idx.__class__.__name__ in str(idx)

    def test_repr_max_seq_item_setting(self):
        # GH10182
        idx = self.create_index()
        idx = idx.repeat(50)
        with pd.option_context("display.max_seq_items", None):
            repr(idx)
            assert '...' not in str(idx)

    def test_copy_name(self):
        # gh-12309: Check that the "name" argument
        # passed at initialization is honored.
        for name, index in self.indices.items():
            if isinstance(index, MultiIndex):
                continue

            first = index.__class__(index, copy=True, name='mario')
            second = first.__class__(first, copy=False)

            # Even though "copy=False", we want a new object.
            assert first is not second

            # Not using tm.assert_index_equal() since names differ.
            assert index.equals(first)

            assert first.name == 'mario'
            assert second.name == 'mario'

            s1 = Series(2, index=first)
            s2 = Series(3, index=second[:-1])

            if not isinstance(index, CategoricalIndex):
                # See gh-13365
                s3 = s1 * s2
                assert s3.index.name == 'mario'

    def test_ensure_copied_data(self):
        # Check the "copy" argument of each Index.__new__ is honoured
        # GH12309
        for name, index in self.indices.items():
            init_kwargs = {}
            if isinstance(index, PeriodIndex):
                # Needs "freq" specification:
                init_kwargs['freq'] = index.freq
            elif isinstance(index, (RangeIndex, MultiIndex, CategoricalIndex)):
                # RangeIndex cannot be initialized from data
                # MultiIndex and CategoricalIndex are tested separately
                continue

            index_type = index.__class__
            result = index_type(index.values, copy=True, **init_kwargs)
            tm.assert_index_equal(index, result)
            tm.assert_numpy_array_equal(index._ndarray_values,
                                        result._ndarray_values,
                                        check_same='copy')

            if isinstance(index, PeriodIndex):
                # .values an object array of Period, thus copied
                result = index_type(ordinal=index.asi8, copy=False,
                                    **init_kwargs)
                tm.assert_numpy_array_equal(index._ndarray_values,
                                            result._ndarray_values,
                                            check_same='same')
            elif isinstance(index, IntervalIndex):
                # checked in test_interval.py
                pass
            else:
                result = index_type(index.values, copy=False, **init_kwargs)
                tm.assert_numpy_array_equal(index.values, result.values,
                                            check_same='same')
                tm.assert_numpy_array_equal(index._ndarray_values,
                                            result._ndarray_values,
                                            check_same='same')

    def test_memory_usage(self):
        for name, index in self.indices.items():
            result = index.memory_usage()
            if len(index):
                index.get_loc(index[0])
                result2 = index.memory_usage()
                result3 = index.memory_usage(deep=True)

                # RangeIndex, IntervalIndex
                # don't have engines
                if not isinstance(index, (RangeIndex, IntervalIndex)):
                    assert result2 > result

                if index.inferred_type == 'object':
                    assert result3 > result2
            else:
                # we report 0 for no-length
                assert result == 0

    def test_argsort(self):
        for k, ind in self.indices.items():

            # separately tested
            if k in ['catIndex']:
                continue

            result = ind.argsort()
            expected = np.array(ind).argsort()
            tm.assert_numpy_array_equal(result, expected, check_dtype=False)

    def test_numpy_argsort(self):
        for k, ind in self.indices.items():
            result = np.argsort(ind)
            expected = ind.argsort()
            tm.assert_numpy_array_equal(result, expected)

            # these are the only two types that perform
            # pandas compatibility input validation - the
            # rest already perform separate (or no) such
            # validation via their 'values' attribute as
            # defined in pandas.core.indexes/base.py - they
            # cannot be changed at the moment due to
            # backwards compatibility concerns
            if isinstance(type(ind), (CategoricalIndex, RangeIndex)):
                msg = "the 'axis' parameter is not supported"
                with pytest.raises(ValueError, match=msg):
                    np.argsort(ind, axis=1)

                msg = "the 'kind' parameter is not supported"
                with pytest.raises(ValueError, match=msg):
                    np.argsort(ind, kind='mergesort')

                msg = "the 'order' parameter is not supported"
                with pytest.raises(ValueError, match=msg):
                    np.argsort(ind, order=('a', 'b'))

    def test_take(self):
        indexer = [4, 3, 0, 2]
        for k, ind in self.indices.items():

            # separate
            if k in ['boolIndex', 'tuples', 'empty']:
                continue

            result = ind.take(indexer)
            expected = ind[indexer]
            assert result.equals(expected)

            if not isinstance(ind, (DatetimeIndex, PeriodIndex,
                                    TimedeltaIndex)):
                # GH 10791
                with pytest.raises(AttributeError):
                    ind.freq

    def test_take_invalid_kwargs(self):
        idx = self.create_index()
        indices = [1, 2]

        msg = r"take\(\) got an unexpected keyword argument 'foo'"
        with pytest.raises(TypeError, match=msg):
            idx.take(indices, foo=2)

        msg = "the 'out' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            idx.take(indices, out=indices)

        msg = "the 'mode' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            idx.take(indices, mode='clip')

    def test_repeat(self):
        rep = 2
        i = self.create_index()
        expected = pd.Index(i.values.repeat(rep), name=i.name)
        tm.assert_index_equal(i.repeat(rep), expected)

        i = self.create_index()
        rep = np.arange(len(i))
        expected = pd.Index(i.values.repeat(rep), name=i.name)
        tm.assert_index_equal(i.repeat(rep), expected)

    def test_numpy_repeat(self):
        rep = 2
        i = self.create_index()
        expected = i.repeat(rep)
        tm.assert_index_equal(np.repeat(i, rep), expected)

        msg = "the 'axis' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            np.repeat(i, rep, axis=0)

    @pytest.mark.parametrize('klass', [list, tuple, np.array, Series])
    def test_where(self, klass):
        i = self.create_index()

        cond = [True] * len(i)
        result = i.where(klass(cond))
        expected = i
        tm.assert_index_equal(result, expected)

        cond = [False] + [True] * len(i[1:])
        expected = pd.Index([i._na_value] + i[1:].tolist(), dtype=i.dtype)
        result = i.where(klass(cond))
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize("case", [0.5, "xxx"])
    @pytest.mark.parametrize("method", ["intersection", "union",
                                        "difference", "symmetric_difference"])
    def test_set_ops_error_cases(self, case, method):
        for name, idx in self.indices.items():
            # non-iterable input
            msg = "Input must be Index or array-like"
            with pytest.raises(TypeError, match=msg):
                getattr(idx, method)(case)

    def test_intersection_base(self):
        for name, idx in self.indices.items():
            first = idx[:5]
            second = idx[:3]
            intersect = first.intersection(second)

            if isinstance(idx, CategoricalIndex):
                pass
            else:
                assert tm.equalContents(intersect, second)

            # GH 10149
            cases = [klass(second.values)
                     for klass in [np.array, Series, list]]
            for case in cases:
                if isinstance(idx, CategoricalIndex):
                    pass
                else:
                    result = first.intersection(case)
                    assert tm.equalContents(result, second)

            if isinstance(idx, MultiIndex):
                msg = "other must be a MultiIndex or a list of tuples"
                with pytest.raises(TypeError, match=msg):
                    first.intersection([1, 2, 3])

    def test_union_base(self):
        for name, idx in self.indices.items():
            first = idx[3:]
            second = idx[:5]
            everything = idx
            union = first.union(second)
            assert tm.equalContents(union, everything)

            # GH 10149
            cases = [klass(second.values)
                     for klass in [np.array, Series, list]]
            for case in cases:
                if isinstance(idx, CategoricalIndex):
                    pass
                else:
                    result = first.union(case)
                    assert tm.equalContents(result, everything)

            if isinstance(idx, MultiIndex):
                msg = "other must be a MultiIndex or a list of tuples"
                with pytest.raises(TypeError, match=msg):
                    first.union([1, 2, 3])

    @pytest.mark.parametrize("sort", [None, False])
    def test_difference_base(self, sort):
        for name, idx in self.indices.items():
            first = idx[2:]
            second = idx[:4]
            answer = idx[4:]
            result = first.difference(second, sort)

            if isinstance(idx, CategoricalIndex):
                pass
            else:
                assert tm.equalContents(result, answer)

            # GH 10149
            cases = [klass(second.values)
                     for klass in [np.array, Series, list]]
            for case in cases:
                if isinstance(idx, CategoricalIndex):
                    pass
                elif isinstance(idx, (DatetimeIndex, TimedeltaIndex)):
                    assert result.__class__ == answer.__class__
                    tm.assert_numpy_array_equal(result.sort_values().asi8,
                                                answer.sort_values().asi8)
                else:
                    result = first.difference(case, sort)
                    assert tm.equalContents(result, answer)

            if isinstance(idx, MultiIndex):
                msg = "other must be a MultiIndex or a list of tuples"
                with pytest.raises(TypeError, match=msg):
                    first.difference([1, 2, 3], sort)

    def test_symmetric_difference(self):
        for name, idx in self.indices.items():
            first = idx[1:]
            second = idx[:-1]
            if isinstance(idx, CategoricalIndex):
                pass
            else:
                answer = idx[[0, -1]]
                result = first.symmetric_difference(second)
                assert tm.equalContents(result, answer)

            # GH 10149
            cases = [klass(second.values)
                     for klass in [np.array, Series, list]]
            for case in cases:
                if isinstance(idx, CategoricalIndex):
                    pass
                else:
                    result = first.symmetric_difference(case)
                    assert tm.equalContents(result, answer)

            if isinstance(idx, MultiIndex):
                msg = "other must be a MultiIndex or a list of tuples"
                with pytest.raises(TypeError, match=msg):
                    first.symmetric_difference([1, 2, 3])

    def test_insert_base(self):
        for name, idx in self.indices.items():
            result = idx[1:4]

            if not len(idx):
                continue

            # test 0th element
            assert idx[0:4].equals(result.insert(0, idx[0]))

    def test_delete_base(self):
        for name, idx in self.indices.items():

            if not len(idx):
                continue

            if isinstance(idx, RangeIndex):
                # tested in class
                continue

            expected = idx[1:]
            result = idx.delete(0)
            assert result.equals(expected)
            assert result.name == expected.name

            expected = idx[:-1]
            result = idx.delete(-1)
            assert result.equals(expected)
            assert result.name == expected.name

            with pytest.raises((IndexError, ValueError)):
                # either depending on numpy version
                idx.delete(len(idx))

    def test_equals(self):
        for name, idx in self.indices.items():
            assert idx.equals(idx)
            assert idx.equals(idx.copy())
            assert idx.equals(idx.astype(object))

            assert not idx.equals(list(idx))
            assert not idx.equals(np.array(idx))

            # Cannot pass in non-int64 dtype to RangeIndex
            if not isinstance(idx, RangeIndex):
                same_values = Index(idx, dtype=object)
                assert idx.equals(same_values)
                assert same_values.equals(idx)

            if idx.nlevels == 1:
                # do not test MultiIndex
                assert not idx.equals(pd.Series(idx))

    def test_equals_op(self):
        # GH9947, GH10637
        index_a = self.create_index()
        if isinstance(index_a, PeriodIndex):
            pytest.skip('Skip check for PeriodIndex')

        n = len(index_a)
        index_b = index_a[0:-1]
        index_c = index_a[0:-1].append(index_a[-2:-1])
        index_d = index_a[0:1]

        msg = "Lengths must match|could not be broadcast"
        with pytest.raises(ValueError, match=msg):
            index_a == index_b
        expected1 = np.array([True] * n)
        expected2 = np.array([True] * (n - 1) + [False])
        tm.assert_numpy_array_equal(index_a == index_a, expected1)
        tm.assert_numpy_array_equal(index_a == index_c, expected2)

        # test comparisons with numpy arrays
        array_a = np.array(index_a)
        array_b = np.array(index_a[0:-1])
        array_c = np.array(index_a[0:-1].append(index_a[-2:-1]))
        array_d = np.array(index_a[0:1])
        with pytest.raises(ValueError, match=msg):
            index_a == array_b
        tm.assert_numpy_array_equal(index_a == array_a, expected1)
        tm.assert_numpy_array_equal(index_a == array_c, expected2)

        # test comparisons with Series
        series_a = Series(array_a)
        series_b = Series(array_b)
        series_c = Series(array_c)
        series_d = Series(array_d)
        with pytest.raises(ValueError, match=msg):
            index_a == series_b

        tm.assert_numpy_array_equal(index_a == series_a, expected1)
        tm.assert_numpy_array_equal(index_a == series_c, expected2)

        # cases where length is 1 for one of them
        with pytest.raises(ValueError, match="Lengths must match"):
            index_a == index_d
        with pytest.raises(ValueError, match="Lengths must match"):
            index_a == series_d
        with pytest.raises(ValueError, match="Lengths must match"):
            index_a == array_d
        msg = "Can only compare identically-labeled Series objects"
        with pytest.raises(ValueError, match=msg):
            series_a == series_d
        with pytest.raises(ValueError, match="Lengths must match"):
            series_a == array_d

        # comparing with a scalar should broadcast; note that we are excluding
        # MultiIndex because in this case each item in the index is a tuple of
        # length 2, and therefore is considered an array of length 2 in the
        # comparison instead of a scalar
        if not isinstance(index_a, MultiIndex):
            expected3 = np.array([False] * (len(index_a) - 2) + [True, False])
            # assuming the 2nd to last item is unique in the data
            item = index_a[-2]
            tm.assert_numpy_array_equal(index_a == item, expected3)
            tm.assert_series_equal(series_a == item, Series(expected3))

    def test_hasnans_isnans(self):
        # GH 11343, added tests for hasnans / isnans
        for name, index in self.indices.items():
            if isinstance(index, MultiIndex):
                pass
            else:
                idx = index.copy()

                # cases in indices doesn't include NaN
                expected = np.array([False] * len(idx), dtype=bool)
                tm.assert_numpy_array_equal(idx._isnan, expected)
                assert idx.hasnans is False

                idx = index.copy()
                values = np.asarray(idx.values)

                if len(index) == 0:
                    continue
                elif isinstance(index, DatetimeIndexOpsMixin):
                    values[1] = iNaT
                elif isinstance(index, (Int64Index, UInt64Index)):
                    continue
                else:
                    values[1] = np.nan

                if isinstance(index, PeriodIndex):
                    idx = index.__class__(values, freq=index.freq)
                else:
                    idx = index.__class__(values)

                expected = np.array([False] * len(idx), dtype=bool)
                expected[1] = True
                tm.assert_numpy_array_equal(idx._isnan, expected)
                assert idx.hasnans is True

    def test_fillna(self):
        # GH 11343
        for name, index in self.indices.items():
            if len(index) == 0:
                pass
            elif isinstance(index, MultiIndex):
                idx = index.copy()
                msg = "isna is not defined for MultiIndex"
                with pytest.raises(NotImplementedError, match=msg):
                    idx.fillna(idx[0])
            else:
                idx = index.copy()
                result = idx.fillna(idx[0])
                tm.assert_index_equal(result, idx)
                assert result is not idx

                msg = "'value' must be a scalar, passed: "
                with pytest.raises(TypeError, match=msg):
                    idx.fillna([idx[0]])

                idx = index.copy()
                values = np.asarray(idx.values)

                if isinstance(index, DatetimeIndexOpsMixin):
                    values[1] = iNaT
                elif isinstance(index, (Int64Index, UInt64Index)):
                    continue
                else:
                    values[1] = np.nan

                if isinstance(index, PeriodIndex):
                    idx = index.__class__(values, freq=index.freq)
                else:
                    idx = index.__class__(values)

                expected = np.array([False] * len(idx), dtype=bool)
                expected[1] = True
                tm.assert_numpy_array_equal(idx._isnan, expected)
                assert idx.hasnans is True

    def test_nulls(self):
        # this is really a smoke test for the methods
        # as these are adequately tested for function elsewhere
        for name, index in self.indices.items():
            if len(index) == 0:
                tm.assert_numpy_array_equal(
                    index.isna(), np.array([], dtype=bool))
            elif isinstance(index, MultiIndex):
                idx = index.copy()
                msg = "isna is not defined for MultiIndex"
                with pytest.raises(NotImplementedError, match=msg):
                    idx.isna()
            else:
                if not index.hasnans:
                    tm.assert_numpy_array_equal(
                        index.isna(), np.zeros(len(index), dtype=bool))
                    tm.assert_numpy_array_equal(
                        index.notna(), np.ones(len(index), dtype=bool))
                else:
                    result = isna(index)
                    tm.assert_numpy_array_equal(index.isna(), result)
                    tm.assert_numpy_array_equal(index.notna(), ~result)

    def test_empty(self):
        # GH 15270
        index = self.create_index()
        assert not index.empty
        assert index[:0].empty

    def test_join_self_unique(self, join_type):
        index = self.create_index()
        if index.is_unique:
            joined = index.join(index, how=join_type)
            assert (index == joined).all()

    def test_map(self):
        # callable
        index = self.create_index()

        # we don't infer UInt64
        if isinstance(index, pd.UInt64Index):
            expected = index.astype('int64')
        else:
            expected = index

        result = index.map(lambda x: x)
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "mapper",
        [
            lambda values, index: {i: e for e, i in zip(values, index)},
            lambda values, index: pd.Series(values, index)])
    def test_map_dictlike(self, mapper):
        index = self.create_index()
        if isinstance(index, (pd.CategoricalIndex, pd.IntervalIndex)):
            pytest.skip("skipping tests for {}".format(type(index)))

        identity = mapper(index.values, index)

        # we don't infer to UInt64 for a dict
        if isinstance(index, pd.UInt64Index) and isinstance(identity, dict):
            expected = index.astype('int64')
        else:
            expected = index

        result = index.map(identity)
        tm.assert_index_equal(result, expected)

        # empty mappable
        expected = pd.Index([np.nan] * len(index))
        result = index.map(mapper(expected, index))
        tm.assert_index_equal(result, expected)

    def test_putmask_with_wrong_mask(self):
        # GH18368
        index = self.create_index()

        with pytest.raises(ValueError):
            index.putmask(np.ones(len(index) + 1, np.bool), 1)

        with pytest.raises(ValueError):
            index.putmask(np.ones(len(index) - 1, np.bool), 1)

        with pytest.raises(ValueError):
            index.putmask('foo', 1)

    @pytest.mark.parametrize('copy', [True, False])
    @pytest.mark.parametrize('name', [None, 'foo'])
    @pytest.mark.parametrize('ordered', [True, False])
    def test_astype_category(self, copy, name, ordered):
        # GH 18630
        index = self.create_index()
        if name:
            index = index.rename(name)

        # standard categories
        dtype = CategoricalDtype(ordered=ordered)
        result = index.astype(dtype, copy=copy)
        expected = CategoricalIndex(index.values, name=name, ordered=ordered)
        tm.assert_index_equal(result, expected)

        # non-standard categories
        dtype = CategoricalDtype(index.unique().tolist()[:-1], ordered)
        result = index.astype(dtype, copy=copy)
        expected = CategoricalIndex(index.values, name=name, dtype=dtype)
        tm.assert_index_equal(result, expected)

        if ordered is False:
            # dtype='category' defaults to ordered=False, so only test once
            result = index.astype('category', copy=copy)
            expected = CategoricalIndex(index.values, name=name)
            tm.assert_index_equal(result, expected)

    def test_is_unique(self):
        # initialize a unique index
        index = self.create_index().drop_duplicates()
        assert index.is_unique is True

        # empty index should be unique
        index_empty = index[:0]
        assert index_empty.is_unique is True

        # test basic dupes
        index_dup = index.insert(0, index[0])
        assert index_dup.is_unique is False

        # single NA should be unique
        index_na = index.insert(0, np.nan)
        assert index_na.is_unique is True

        # multiple NA should not be unique
        index_na_dup = index_na.insert(0, np.nan)
        assert index_na_dup.is_unique is False
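For orientation: Base never defines _holder, create_index or self.indices itself; concrete per-index test classes supply them. A minimal sketch of such a subclass follows (the class name TestIllustrativeIndex and the choice of Int64Index are illustrative assumptions, not part of this file):

import numpy as np

from pandas import Int64Index


class TestIllustrativeIndex(Base):
    # index constructor exercised by test_pickle_compat_construction
    _holder = Int64Index

    def setup_method(self, method):
        # the name -> index mapping that the for-loops in Base iterate over
        self.indices = dict(index=Int64Index(np.arange(5)))
        self.setup_indices()

    def create_index(self):
        # fresh instance consumed by most of the tests above
        return Int64Index(np.arange(5), name='foo')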
import json

import numpy as np
import pytest

from pandas import DataFrame, Index
import pandas.util.testing as tm

from pandas.io.json import json_normalize
from pandas.io.json.normalize import nested_to_record


@pytest.fixture
def deep_nested():
    # deeply nested data
    return [{'country': 'USA',
             'states': [{'name': 'California',
                         'cities': [{'name': 'San Francisco', 'pop': 12345},
                                    {'name': 'Los Angeles', 'pop': 12346}]
                         },
                        {'name': 'Ohio',
                         'cities': [{'name': 'Columbus', 'pop': 1234},
                                    {'name': 'Cleveland', 'pop': 1236}]}
                        ]
             },
            {'country': 'Germany',
             'states': [{'name': 'Bayern',
                         'cities': [{'name': 'Munich', 'pop': 12347}]
                         },
                        {'name': 'Nordrhein-Westfalen',
                         'cities': [{'name': 'Duesseldorf', 'pop': 1238},
                                    {'name': 'Koeln', 'pop': 1239}]}
                        ]
             }
            ]


@pytest.fixture
def state_data():
    return [
        {'counties': [{'name': 'Dade', 'population': 12345},
                      {'name': 'Broward', 'population': 40000},
                      {'name': 'Palm Beach', 'population': 60000}],
         'info': {'governor': 'Rick Scott'},
         'shortname': 'FL',
         'state': 'Florida'},
        {'counties': [{'name': 'Summit', 'population': 1234},
                      {'name': 'Cuyahoga', 'population': 1337}],
         'info': {'governor': 'John Kasich'},
         'shortname': 'OH',
         'state': 'Ohio'}]


@pytest.fixture
def author_missing_data():
    return [
        {'info': None},
        {'info': {'created_at': '11/08/1993', 'last_updated': '26/05/2012'},
         'author_name': {'first': 'Jane', 'last_name': 'Doe'}
         }]


@pytest.fixture
def missing_metadata():
    return [
        {'name': 'Alice',
         'addresses': [{'number': 9562,
                        'street': 'Morris St.',
                        'city': 'Massillon',
                        'state': 'OH',
                        'zip': 44646}]
         },
        {'addresses': [{'number': 8449,
                        'street': 'Spring St.',
                        'city': 'Elizabethton',
                        'state': 'TN',
                        'zip': 37643}]
         }
    ]


class TestJSONNormalize:

    def test_simple_records(self):
        recs = [{'a': 1, 'b': 2, 'c': 3},
                {'a': 4, 'b': 5, 'c': 6},
                {'a': 7, 'b': 8, 'c': 9},
                {'a': 10, 'b': 11, 'c': 12}]

        result = json_normalize(recs)
        expected = DataFrame(recs)

        tm.assert_frame_equal(result, expected)

    def test_simple_normalize(self, state_data):
        result = json_normalize(state_data[0], 'counties')
        expected = DataFrame(state_data[0]['counties'])
        tm.assert_frame_equal(result, expected)

        result = json_normalize(state_data, 'counties')

        expected = []
        for rec in state_data:
            expected.extend(rec['counties'])
        expected = DataFrame(expected)

        tm.assert_frame_equal(result, expected)

        result = json_normalize(state_data, 'counties', meta='state')
        expected['state'] = np.array(['Florida', 'Ohio']).repeat([3, 2])

        tm.assert_frame_equal(result, expected)

    def test_empty_array(self):
        result = json_normalize([])
        expected = DataFrame()
        tm.assert_frame_equal(result, expected)

    def test_simple_normalize_with_separator(self, deep_nested):
        # GH 14883
        result = json_normalize({'A': {'A': 1, 'B': 2}})
        expected = DataFrame([[1, 2]], columns=['A.A', 'A.B'])
        tm.assert_frame_equal(result.reindex_like(expected), expected)

        result = json_normalize({'A': {'A': 1, 'B': 2}}, sep='_')
        expected = DataFrame([[1, 2]], columns=['A_A', 'A_B'])
        tm.assert_frame_equal(result.reindex_like(expected), expected)

        result = json_normalize({'A': {'A': 1, 'B': 2}}, sep='\u03c3')
        expected = DataFrame([[1, 2]], columns=['A\u03c3A', 'A\u03c3B'])
        tm.assert_frame_equal(result.reindex_like(expected), expected)

        result = json_normalize(deep_nested, ['states', 'cities'],
                                meta=['country', ['states', 'name']],
                                sep='_')
        expected = Index(['name', 'pop',
                          'country', 'states_name']).sort_values()
        assert result.columns.sort_values().equals(expected)

    def test_value_array_record_prefix(self):
        # GH 21536
        result = json_normalize({'A': [1, 2]}, 'A', record_prefix='Prefix.')
        expected = DataFrame([[1], [2]], columns=['Prefix.0'])
        tm.assert_frame_equal(result, expected)

    def test_nested_object_record_path(self):
        # GH 22706
        data = {'state': 'Florida',
                'info': {
                    'governor': 'Rick Scott',
                    'counties': [{'name': 'Dade', 'population': 12345},
                                 {'name': 'Broward', 'population': 40000},
                                 {'name': 'Palm Beach', 'population': 60000}]}}
        result = json_normalize(data, record_path=["info", "counties"])
        expected = DataFrame([['Dade', 12345],
                              ['Broward', 40000],
                              ['Palm Beach', 60000]],
                             columns=['name', 'population'])
        tm.assert_frame_equal(result, expected)

    def test_more_deeply_nested(self, deep_nested):
        result = json_normalize(deep_nested, ['states', 'cities'],
                                meta=['country', ['states', 'name']])
        # meta_prefix={'states': 'state_'})

        ex_data = {'country': ['USA'] * 4 + ['Germany'] * 3,
                   'states.name': ['California', 'California', 'Ohio', 'Ohio',
                                   'Bayern', 'Nordrhein-Westfalen',
                                   'Nordrhein-Westfalen'],
                   'name': ['San Francisco', 'Los Angeles', 'Columbus',
                            'Cleveland', 'Munich', 'Duesseldorf', 'Koeln'],
                   'pop': [12345, 12346, 1234, 1236, 12347, 1238, 1239]}

        expected = DataFrame(ex_data, columns=result.columns)
        tm.assert_frame_equal(result, expected)

    def test_shallow_nested(self):
        data = [{'state': 'Florida',
                 'shortname': 'FL',
                 'info': {
                     'governor': 'Rick Scott'
                 },
                 'counties': [{'name': 'Dade', 'population': 12345},
                              {'name': 'Broward', 'population': 40000},
                              {'name': 'Palm Beach', 'population': 60000}]},
                {'state': 'Ohio',
                 'shortname': 'OH',
                 'info': {
                     'governor': 'John Kasich'
                 },
                 'counties': [{'name': 'Summit', 'population': 1234},
                              {'name': 'Cuyahoga', 'population': 1337}]}]

        result = json_normalize(data, 'counties',
                                ['state', 'shortname',
                                 ['info', 'governor']])
        ex_data = {'name': ['Dade', 'Broward', 'Palm Beach', 'Summit',
                            'Cuyahoga'],
                   'state': ['Florida'] * 3 + ['Ohio'] * 2,
                   'shortname': ['FL', 'FL', 'FL', 'OH', 'OH'],
                   'info.governor': ['Rick Scott'] * 3 + ['John Kasich'] * 2,
                   'population': [12345, 40000, 60000, 1234, 1337]}
        expected = DataFrame(ex_data, columns=result.columns)
        tm.assert_frame_equal(result, expected)

    def test_meta_name_conflict(self):
        data = [{'foo': 'hello',
                 'bar': 'there',
                 'data': [{'foo': 'something', 'bar': 'else'},
                          {'foo': 'something2', 'bar': 'else2'}]}]

        msg = (r"Conflicting metadata name (foo|bar),"
               " need distinguishing prefix")
        with pytest.raises(ValueError, match=msg):
            json_normalize(data, 'data', meta=['foo', 'bar'])

        result = json_normalize(data, 'data', meta=['foo', 'bar'],
                                meta_prefix='meta')

        for val in ['metafoo', 'metabar', 'foo', 'bar']:
            assert val in result

    def test_meta_parameter_not_modified(self):
        # GH 18610
        data = [{'foo': 'hello',
                 'bar': 'there',
                 'data': [{'foo': 'something', 'bar': 'else'},
                          {'foo': 'something2', 'bar': 'else2'}]}]

        COLUMNS = ['foo', 'bar']
        result = json_normalize(data, 'data', meta=COLUMNS,
                                meta_prefix='meta')

        assert COLUMNS == ['foo', 'bar']
        for val in ['metafoo', 'metabar', 'foo', 'bar']:
            assert val in result

    def test_record_prefix(self, state_data):
        result = json_normalize(state_data[0], 'counties')
        expected = DataFrame(state_data[0]['counties'])
        tm.assert_frame_equal(result, expected)

        result = json_normalize(state_data, 'counties',
                                meta='state',
                                record_prefix='county_')

        expected = []
        for rec in state_data:
            expected.extend(rec['counties'])
        expected = DataFrame(expected)
        expected = expected.rename(columns=lambda x: 'county_' + x)
        expected['state'] = np.array(['Florida', 'Ohio']).repeat([3, 2])

        tm.assert_frame_equal(result, expected)

    def test_non_ascii_key(self):
        testjson = (
            b'[{"\xc3\x9cnic\xc3\xb8de":0,"sub":{"A":1, "B":2}},' +
            b'{"\xc3\x9cnic\xc3\xb8de":1,"sub":{"A":3, "B":4}}]'
        ).decode('utf8')

        testdata = {
            'sub.A': [1, 3],
            'sub.B': [2, 4],
            b"\xc3\x9cnic\xc3\xb8de".decode('utf8'): [0, 1]
        }
        expected = DataFrame(testdata)

        result = json_normalize(json.loads(testjson))
        tm.assert_frame_equal(result, expected)

    def test_missing_field(self, author_missing_data):
        # GH20030:
        result = json_normalize(author_missing_data)
        ex_data = [
            {'info': np.nan,
             'author_name.first': np.nan,
             'author_name.last_name': np.nan,
             'info.created_at': np.nan,
             'info.last_updated': np.nan},
            {'info': None,
             'author_name.first': 'Jane',
             'author_name.last_name': 'Doe',
             'info.created_at': '11/08/1993',
             'info.last_updated': '26/05/2012'}
        ]
        expected = DataFrame(ex_data)
        tm.assert_frame_equal(result, expected)


class TestNestedToRecord:

    def test_flat_stays_flat(self):
        recs = [dict(flat1=1, flat2=2),
                dict(flat1=3, flat2=4),
                ]
        result = nested_to_record(recs)
        expected = recs
        assert result == expected

    def test_one_level_deep_flattens(self):
        data = dict(flat1=1,
                    dict1=dict(c=1, d=2))

        result = nested_to_record(data)
        expected = {'dict1.c': 1,
                    'dict1.d': 2,
                    'flat1': 1}

        assert result == expected

    def test_nested_flattens(self):
        data = dict(flat1=1,
                    dict1=dict(c=1, d=2),
                    nested=dict(e=dict(c=1, d=2),
                                d=2))

        result = nested_to_record(data)
        expected = {'dict1.c': 1,
                    'dict1.d': 2,
                    'flat1': 1,
                    'nested.d': 2,
                    'nested.e.c': 1,
                    'nested.e.d': 2}

        assert result == expected

    def test_json_normalize_errors(self, missing_metadata):
        # GH14583:
        # If meta keys are not always present a new option to set
        # errors='ignore' has been implemented

        msg = ("Try running with errors='ignore' as key 'name'"
               " is not always present")
        with pytest.raises(KeyError, match=msg):
            json_normalize(
                data=missing_metadata,
                record_path='addresses',
                meta='name',
                errors='raise')

    def test_missing_meta(self, missing_metadata):
        # GH25468
        # If metadata is nullable with errors set to ignore, the null values
        # should be numpy.nan values
        result = json_normalize(
            data=missing_metadata,
            record_path='addresses',
            meta='name',
            errors='ignore')
        ex_data = [
            {'city': 'Massillon', 'number': 9562, 'state': 'OH',
             'street': 'Morris St.', 'zip': 44646, 'name': 'Alice'},
            {'city': 'Elizabethton', 'number': 8449, 'state': 'TN',
             'street': 'Spring St.', 'zip': 37643, 'name': np.nan}
        ]
        ex_data = [
            ['Massillon', 9562, 'OH', 'Morris St.', 44646, 'Alice'],
            ['Elizabethton', 8449, 'TN', 'Spring St.', 37643, np.nan]
        ]
        columns = ['city', 'number', 'state', 'street', 'zip', 'name']
        expected = DataFrame(ex_data, columns=columns)
        tm.assert_frame_equal(result, expected)

    def test_donot_drop_nonevalues(self):
        # GH21356
        data = [
            {'info': None,
             'author_name':
                 {'first': 'Smith', 'last_name': 'Appleseed'}
             },
            {'info':
                 {'created_at': '11/08/1993', 'last_updated': '26/05/2012'},
             'author_name':
                 {'first': 'Jane', 'last_name': 'Doe'}
             }
        ]
        result = nested_to_record(data)
        expected = [
            {'info': None,
             'author_name.first': 'Smith',
             'author_name.last_name': 'Appleseed'},
            {'author_name.first': 'Jane',
             'author_name.last_name': 'Doe',
             'info.created_at': '11/08/1993',
             'info.last_updated': '26/05/2012'}]

        assert result == expected

    def test_nonetype_top_level_bottom_level(self):
        # GH21158: If inner level json has a key with a null value
        # make sure it doesnt do a new_d.pop twice and except
        data = {
            "id": None,
            "location": {
                "country": {
                    "state": {
                        "id": None,
                        "town.info": {
                            "id": None,
                            "region": None,
                            "x": 49.151580810546875,
                            "y": -33.148521423339844,
                            "z": 27.572303771972656}}}
            }
        }
        result = nested_to_record(data)
        expected = {
            'id': None,
            'location.country.state.id': None,
            'location.country.state.town.info.id': None,
            'location.country.state.town.info.region': None,
            'location.country.state.town.info.x': 49.151580810546875,
            'location.country.state.town.info.y': -33.148521423339844,
            'location.country.state.town.info.z': 27.572303771972656}
        assert result == expected

    def test_nonetype_multiple_levels(self):
        # GH21158: If inner level json has a key with a null value
        # make sure it doesnt do a new_d.pop twice and except
        data = {
            "id": None,
            "location": {
                "id": None,
                "country": {
                    "id": None,
                    "state": {
                        "id": None,
                        "town.info": {
                            "region": None,
                            "x": 49.151580810546875,
                            "y": -33.148521423339844,
                            "z": 27.572303771972656}}}
            }
        }
        result = nested_to_record(data)
        expected = {
            'id': None,
            'location.id': None,
            'location.country.id': None,
            'location.country.state.id': None,
            'location.country.state.town.info.region': None,
            'location.country.state.town.info.x': 49.151580810546875,
            'location.country.state.town.info.y': -33.148521423339844,
            'location.country.state.town.info.z': 27.572303771972656}
        assert result == expected
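As a quick orientation to the two helpers exercised above, a minimal sketch of their contrasting outputs (the record variable and its keys are illustrative; the imports match the top of this file, and the behavior mirrors test_one_level_deep_flattens and test_simple_normalize_with_separator):

from pandas.io.json import json_normalize
from pandas.io.json.normalize import nested_to_record

record = {'id': 1, 'geo': {'lat': 48.1, 'lon': 11.6}}

# nested_to_record flattens a single dict into dotted keys:
# {'id': 1, 'geo.lat': 48.1, 'geo.lon': 11.6}
flat = nested_to_record(record)

# json_normalize applies the same flattening per record and builds a
# DataFrame with columns like ['id', 'geo.lat', 'geo.lon']
frame = json_normalize([record])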
cbertinato/pandas
pandas/tests/io/json/test_normalize.py
pandas/tests/indexes/common.py
import numpy as np

from pandas._libs import algos as libalgos, index as libindex

import pandas.util.testing as tm


class TestNumericEngine:
    def test_is_monotonic(self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype
        num = 1000
        arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype)

        # monotonic increasing
        engine = engine_type(lambda: arr, len(arr))
        assert engine.is_monotonic_increasing is True
        assert engine.is_monotonic_decreasing is False

        # monotonic decreasing
        engine = engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is True

        # neither monotonic increasing or decreasing
        arr = np.array([1] * num + [2] * num + [1] * num, dtype=dtype)
        engine = engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is False

    def test_is_unique(self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        # unique
        arr = np.array([1, 3, 2], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.is_unique is True

        # not unique
        arr = np.array([1, 2, 1], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.is_unique is False

    def test_get_loc(self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        # unique
        arr = np.array([1, 2, 3], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.get_loc(2) == 1

        # monotonic
        num = 1000
        arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.get_loc(2) == slice(1000, 2000)

        # not monotonic
        arr = np.array([1, 2, 3] * num, dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        expected = np.array([False, True, False] * num, dtype=bool)
        result = engine.get_loc(2)
        assert (result == expected).all()

    def test_get_backfill_indexer(
            self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        arr = np.array([1, 5, 10], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))

        new = np.arange(12, dtype=dtype)
        result = engine.get_backfill_indexer(new)

        expected = libalgos.backfill(arr, new)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_pad_indexer(
            self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        arr = np.array([1, 5, 10], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))

        new = np.arange(12, dtype=dtype)
        result = engine.get_pad_indexer(new)

        expected = libalgos.pad(arr, new)
        tm.assert_numpy_array_equal(result, expected)


class TestObjectEngine:
    engine_type = libindex.ObjectEngine
    dtype = np.object_
    values = list('abc')

    def test_is_monotonic(self):
        num = 1000
        arr = np.array(['a'] * num + ['a'] * num + ['c'] * num,
                       dtype=self.dtype)

        # monotonic increasing
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.is_monotonic_increasing is True
        assert engine.is_monotonic_decreasing is False

        # monotonic decreasing
        engine = self.engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is True

        # neither monotonic increasing or decreasing
        arr = np.array(['a'] * num + ['b'] * num + ['a'] * num,
                       dtype=self.dtype)
        engine = self.engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is False

    def test_is_unique(self):
        # unique
        arr = np.array(self.values, dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.is_unique is True

        # not unique
        arr = np.array(['a', 'b', 'a'], dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.is_unique is False

    def test_get_loc(self):
        # unique
        arr = np.array(self.values, dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.get_loc('b') == 1

        # monotonic
        num = 1000
        arr = np.array(['a'] * num + ['b'] * num + ['c'] * num,
                       dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.get_loc('b') == slice(1000, 2000)

        # not monotonic
        arr = np.array(self.values * num, dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        expected = np.array([False, True, False] * num, dtype=bool)
        result = engine.get_loc('b')
        assert (result == expected).all()

    def test_get_backfill_indexer(self):
        arr = np.array(['a', 'e', 'j'], dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))

        new = np.array(list('abcdefghij'), dtype=self.dtype)
        result = engine.get_backfill_indexer(new)

        expected = libalgos.backfill["object"](arr, new)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_pad_indexer(self):
        arr = np.array(['a', 'e', 'j'], dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))

        new = np.array(list('abcdefghij'), dtype=self.dtype)
        result = engine.get_pad_indexer(new)

        expected = libalgos.pad["object"](arr, new)
        tm.assert_numpy_array_equal(result, expected)
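The engine API driven by these tests is compact: the constructor takes a zero-argument callable returning the values plus the length, and lookups go through get_loc. A minimal standalone sketch using the same ObjectEngine as TestObjectEngine (internal pandas API of this era; not a public interface):

import numpy as np
from pandas._libs import index as libindex

arr = np.array(['a', 'b', 'c'], dtype=np.object_)
# engines take a callable returning the values, plus the length
engine = libindex.ObjectEngine(lambda: arr, len(arr))
assert engine.is_monotonic_increasing is True
# unique, monotonic values resolve to a scalar position
assert engine.get_loc('b') == 1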
cbertinato/pandas
pandas/tests/io/json/test_normalize.py
pandas/tests/indexing/test_indexing_engines.py
""" Arithmetic operations for PandasObjects This is not a public API. """ import datetime import operator import textwrap from typing import Dict, Optional import warnings import numpy as np from pandas._libs import algos as libalgos, lib, ops as libops from pandas.errors import NullFrequencyError from pandas.util._decorators import Appender from pandas.core.dtypes.cast import ( construct_1d_object_array_from_listlike, find_common_type, maybe_upcast_putmask) from pandas.core.dtypes.common import ( ensure_object, is_bool_dtype, is_categorical_dtype, is_datetime64_dtype, is_datetime64tz_dtype, is_datetimelike_v_numeric, is_extension_array_dtype, is_integer_dtype, is_list_like, is_object_dtype, is_period_dtype, is_scalar, is_timedelta64_dtype, needs_i8_conversion) from pandas.core.dtypes.generic import ( ABCDataFrame, ABCIndex, ABCIndexClass, ABCSeries, ABCSparseArray, ABCSparseSeries) from pandas.core.dtypes.missing import isna, notna import pandas as pd import pandas.core.common as com import pandas.core.missing as missing # ----------------------------------------------------------------------------- # Ops Wrapping Utilities def get_op_result_name(left, right): """ Find the appropriate name to pin to an operation result. This result should always be either an Index or a Series. Parameters ---------- left : {Series, Index} right : object Returns ------- name : object Usually a string """ # `left` is always a pd.Series when called from within ops if isinstance(right, (ABCSeries, pd.Index)): name = _maybe_match_name(left, right) else: name = left.name return name def _maybe_match_name(a, b): """ Try to find a name to attach to the result of an operation between a and b. If only one of these has a `name` attribute, return that name. Otherwise return a consensus name if they match of None if they have different names. Parameters ---------- a : object b : object Returns ------- name : str or None See Also -------- pandas.core.common.consensus_name_attr """ a_has = hasattr(a, 'name') b_has = hasattr(b, 'name') if a_has and b_has: if a.name == b.name: return a.name else: # TODO: what if they both have np.nan for their names? return None elif a_has: return a.name elif b_has: return b.name return None def maybe_upcast_for_op(obj): """ Cast non-pandas objects to pandas types to unify behavior of arithmetic and comparison operations. Parameters ---------- obj: object Returns ------- out : object Notes ----- Be careful to call this *after* determining the `name` attribute to be attached to the result of the arithmetic operation. """ if type(obj) is datetime.timedelta: # GH#22390 cast up to Timedelta to rely on Timedelta # implementation; otherwise operation against numeric-dtype # raises TypeError return pd.Timedelta(obj) elif isinstance(obj, np.timedelta64) and not isna(obj): # In particular non-nanosecond timedelta64 needs to be cast to # nanoseconds, or else we get undesired behavior like # np.timedelta64(3, 'D') / 2 == np.timedelta64(1, 'D') # The isna check is to avoid casting timedelta64("NaT"), which would # return NaT and incorrectly be treated as a datetime-NaT. 
return pd.Timedelta(obj) elif isinstance(obj, np.ndarray) and is_timedelta64_dtype(obj): # GH#22390 Unfortunately we need to special-case right-hand # timedelta64 dtypes because numpy casts integer dtypes to # timedelta64 when operating with timedelta64 return pd.TimedeltaIndex(obj) return obj # ----------------------------------------------------------------------------- # Reversed Operations not available in the stdlib operator module. # Defining these instead of using lambdas allows us to reference them by name. def radd(left, right): return right + left def rsub(left, right): return right - left def rmul(left, right): return right * left def rdiv(left, right): return right / left def rtruediv(left, right): return right / left def rfloordiv(left, right): return right // left def rmod(left, right): # check if right is a string as % is the string # formatting operation; this is a TypeError # otherwise perform the op if isinstance(right, str): raise TypeError("{typ} cannot perform the operation mod".format( typ=type(left).__name__)) return right % left def rdivmod(left, right): return divmod(right, left) def rpow(left, right): return right ** left def rand_(left, right): return operator.and_(right, left) def ror_(left, right): return operator.or_(right, left) def rxor(left, right): return operator.xor(right, left) # ----------------------------------------------------------------------------- def make_invalid_op(name): """ Return a binary method that always raises a TypeError. Parameters ---------- name : str Returns ------- invalid_op : function """ def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self).__name__)) invalid_op.__name__ = name return invalid_op def _gen_eval_kwargs(name): """ Find the keyword arguments to pass to numexpr for the given operation. Parameters ---------- name : str Returns ------- eval_kwargs : dict Examples -------- >>> _gen_eval_kwargs("__add__") {} >>> _gen_eval_kwargs("rtruediv") {'reversed': True, 'truediv': True} """ kwargs = {} # Series appear to only pass __add__, __radd__, ... # but DataFrame gets both these dunder names _and_ non-dunder names # add, radd, ... name = name.replace('__', '') if name.startswith('r'): if name not in ['radd', 'rand', 'ror', 'rxor']: # Exclude commutative operations kwargs['reversed'] = True if name in ['truediv', 'rtruediv']: kwargs['truediv'] = True if name in ['ne']: kwargs['masker'] = True return kwargs def _gen_fill_zeros(name): """ Find the appropriate fill value to use when filling in undefined values in the results of the given operation caused by operating on (generally dividing by) zero. Parameters ---------- name : str Returns ------- fill_value : {None, np.nan, np.inf} """ name = name.strip('__') if 'div' in name: # truediv, floordiv, div, and reversed variants fill_value = np.inf elif 'mod' in name: # mod, rmod fill_value = np.nan else: fill_value = None return fill_value def _get_frame_op_default_axis(name): """ Only DataFrame cares about default_axis, specifically: special methods have default_axis=None and flex methods have default_axis='columns'. Parameters ---------- name : str Returns ------- default_axis: str or None """ if name.replace('__r', '__') in ['__and__', '__or__', '__xor__']: # bool methods return 'columns' elif name.startswith('__'): # __add__, __mul__, ... return None else: # add, mul, ... return 'columns' def _get_opstr(op, cls): """ Find the operation string, if any, to pass to numexpr for this operation. 
Parameters ---------- op : binary operator cls : class Returns ------- op_str : string or None """ # numexpr is available for non-sparse classes subtyp = getattr(cls, '_subtyp', '') use_numexpr = 'sparse' not in subtyp if not use_numexpr: # if we're not using numexpr, then don't pass a str_rep return None return {operator.add: '+', radd: '+', operator.mul: '*', rmul: '*', operator.sub: '-', rsub: '-', operator.truediv: '/', rtruediv: '/', operator.floordiv: '//', rfloordiv: '//', operator.mod: None, # TODO: Why None for mod but '%' for rmod? rmod: '%', operator.pow: '**', rpow: '**', operator.eq: '==', operator.ne: '!=', operator.le: '<=', operator.lt: '<', operator.ge: '>=', operator.gt: '>', operator.and_: '&', rand_: '&', operator.or_: '|', ror_: '|', operator.xor: '^', rxor: '^', divmod: None, rdivmod: None}[op] def _get_op_name(op, special): """ Find the name to attach to this method according to conventions for special and non-special methods. Parameters ---------- op : binary operator special : bool Returns ------- op_name : str """ opname = op.__name__.strip('_') if special: opname = '__{opname}__'.format(opname=opname) return opname # ----------------------------------------------------------------------------- # Docstring Generation and Templates _add_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.add(b, fill_value=0) a 2.0 b 1.0 c 1.0 d 1.0 e NaN dtype: float64 """ _sub_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.subtract(b, fill_value=0) a 0.0 b 1.0 c 1.0 d -1.0 e NaN dtype: float64 """ _mul_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.multiply(b, fill_value=0) a 1.0 b 0.0 c 0.0 d 0.0 e NaN dtype: float64 """ _div_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.divide(b, fill_value=0) a 1.0 b inf c inf d 0.0 e NaN dtype: float64 """ _floordiv_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.floordiv(b, fill_value=0) a 1.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _mod_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.mod(b, fill_value=0) a 0.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _pow_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 
1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.pow(b, fill_value=0) a 1.0 b 1.0 c 1.0 d 0.0 e NaN dtype: float64 """ _op_descriptions = { # Arithmetic Operators 'add': {'op': '+', 'desc': 'Addition', 'reverse': 'radd', 'series_examples': _add_example_SERIES}, 'sub': {'op': '-', 'desc': 'Subtraction', 'reverse': 'rsub', 'series_examples': _sub_example_SERIES}, 'mul': {'op': '*', 'desc': 'Multiplication', 'reverse': 'rmul', 'series_examples': _mul_example_SERIES, 'df_examples': None}, 'mod': {'op': '%', 'desc': 'Modulo', 'reverse': 'rmod', 'series_examples': _mod_example_SERIES}, 'pow': {'op': '**', 'desc': 'Exponential power', 'reverse': 'rpow', 'series_examples': _pow_example_SERIES, 'df_examples': None}, 'truediv': {'op': '/', 'desc': 'Floating division', 'reverse': 'rtruediv', 'series_examples': _div_example_SERIES, 'df_examples': None}, 'floordiv': {'op': '//', 'desc': 'Integer division', 'reverse': 'rfloordiv', 'series_examples': _floordiv_example_SERIES, 'df_examples': None}, 'divmod': {'op': 'divmod', 'desc': 'Integer division and modulo', 'reverse': 'rdivmod', 'series_examples': None, 'df_examples': None}, # Comparison Operators 'eq': {'op': '==', 'desc': 'Equal to', 'reverse': None, 'series_examples': None}, 'ne': {'op': '!=', 'desc': 'Not equal to', 'reverse': None, 'series_examples': None}, 'lt': {'op': '<', 'desc': 'Less than', 'reverse': None, 'series_examples': None}, 'le': {'op': '<=', 'desc': 'Less than or equal to', 'reverse': None, 'series_examples': None}, 'gt': {'op': '>', 'desc': 'Greater than', 'reverse': None, 'series_examples': None}, 'ge': {'op': '>=', 'desc': 'Greater than or equal to', 'reverse': None, 'series_examples': None} } # type: Dict[str, Dict[str, Optional[str]]] _op_names = list(_op_descriptions.keys()) for key in _op_names: reverse_op = _op_descriptions[key]['reverse'] if reverse_op is not None: _op_descriptions[reverse_op] = _op_descriptions[key].copy() _op_descriptions[reverse_op]['reverse'] = key _flex_doc_SERIES = """ Return {desc} of series and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. Parameters ---------- other : Series or scalar value fill_value : None or float value, default None (NaN) Fill existing missing (NaN) values, and any new element needed for successful Series alignment, with this value before computation. If data in both corresponding Series locations is missing the result will be missing. level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- Series The result of the operation. See Also -------- Series.{reverse} """ _arith_doc_FRAME = """ Binary operator %s with support to substitute a fill_value for missing data in one of the inputs Parameters ---------- other : Series, DataFrame, or constant axis : {0, 1, 'index', 'columns'} For Series input, axis to match Series index on fill_value : None or float value, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. 
If data in both corresponding DataFrame locations is missing the result will be missing level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level Returns ------- result : DataFrame Notes ----- Mismatched indices will be unioned together """ _flex_doc_FRAME = """ Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. With reverse version, `{reverse}`. Among flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to arithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`. Parameters ---------- other : scalar, sequence, Series, or DataFrame Any single or multiple element data structure, or list-like object. axis : {{0 or 'index', 1 or 'columns'}} Whether to compare by the index (0 or 'index') or columns (1 or 'columns'). For Series input, axis to match Series index on. level : int or label Broadcast across a level, matching Index values on the passed MultiIndex level. fill_value : float or None, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. If data in both corresponding DataFrame locations is missing the result will be missing. Returns ------- DataFrame Result of the arithmetic operation. See Also -------- DataFrame.add : Add DataFrames. DataFrame.sub : Subtract DataFrames. DataFrame.mul : Multiply DataFrames. DataFrame.div : Divide DataFrames (float division). DataFrame.truediv : Divide DataFrames (float division). DataFrame.floordiv : Divide DataFrames (integer division). DataFrame.mod : Calculate modulo (remainder after division). DataFrame.pow : Calculate exponential power. Notes ----- Mismatched indices will be unioned together. Examples -------- >>> df = pd.DataFrame({{'angles': [0, 3, 4], ... 'degrees': [360, 180, 360]}}, ... index=['circle', 'triangle', 'rectangle']) >>> df angles degrees circle 0 360 triangle 3 180 rectangle 4 360 Add a scalar with operator version which return the same results. >>> df + 1 angles degrees circle 1 361 triangle 4 181 rectangle 5 361 >>> df.add(1) angles degrees circle 1 361 triangle 4 181 rectangle 5 361 Divide by constant with reverse version. >>> df.div(10) angles degrees circle 0.0 36.0 triangle 0.3 18.0 rectangle 0.4 36.0 >>> df.rdiv(10) angles degrees circle inf 0.027778 triangle 3.333333 0.055556 rectangle 2.500000 0.027778 Subtract a list and Series by axis with operator version. >>> df - [1, 2] angles degrees circle -1 358 triangle 2 178 rectangle 3 358 >>> df.sub([1, 2], axis='columns') angles degrees circle -1 358 triangle 2 178 rectangle 3 358 >>> df.sub(pd.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']), ... axis='index') angles degrees circle -1 359 triangle 2 179 rectangle 3 359 Multiply a DataFrame of different shape with operator version. >>> other = pd.DataFrame({{'angles': [0, 3, 4]}}, ... index=['circle', 'triangle', 'rectangle']) >>> other angles circle 0 triangle 3 rectangle 4 >>> df * other angles degrees circle 0 NaN triangle 9 NaN rectangle 16 NaN >>> df.mul(other, fill_value=0) angles degrees circle 0 0.0 triangle 9 0.0 rectangle 16 0.0 Divide by a MultiIndex by level. >>> df_multindex = pd.DataFrame({{'angles': [0, 3, 4, 4, 5, 6], ... 'degrees': [360, 180, 360, 360, 540, 720]}}, ... index=[['A', 'A', 'A', 'B', 'B', 'B'], ... ['circle', 'triangle', 'rectangle', ... 
'square', 'pentagon', 'hexagon']]) >>> df_multindex angles degrees A circle 0 360 triangle 3 180 rectangle 4 360 B square 4 360 pentagon 5 540 hexagon 6 720 >>> df.div(df_multindex, level=1, fill_value=0) angles degrees A circle NaN 1.0 triangle 1.0 1.0 rectangle 1.0 1.0 B square 0.0 0.0 pentagon 0.0 0.0 hexagon 0.0 0.0 """ _flex_comp_doc_FRAME = """ Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`). Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison operators. Equivalent to `==`, `!=`, `<=`, `<`, `>=`, `>` with support to choose axis (rows or columns) and level for comparison. Parameters ---------- other : scalar, sequence, Series, or DataFrame Any single or multiple element data structure, or list-like object. axis : {{0 or 'index', 1 or 'columns'}}, default 'columns' Whether to compare by the index (0 or 'index') or columns (1 or 'columns'). level : int or label Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- DataFrame of bool Result of the comparison. See Also -------- DataFrame.eq : Compare DataFrames for equality elementwise. DataFrame.ne : Compare DataFrames for inequality elementwise. DataFrame.le : Compare DataFrames for less than inequality or equality elementwise. DataFrame.lt : Compare DataFrames for strictly less than inequality elementwise. DataFrame.ge : Compare DataFrames for greater than inequality or equality elementwise. DataFrame.gt : Compare DataFrames for strictly greater than inequality elementwise. Notes ----- Mismatched indices will be unioned together. `NaN` values are considered different (i.e. `NaN` != `NaN`). Examples -------- >>> df = pd.DataFrame({{'cost': [250, 150, 100], ... 'revenue': [100, 250, 300]}}, ... index=['A', 'B', 'C']) >>> df cost revenue A 250 100 B 150 250 C 100 300 Comparison with a scalar, using either the operator or method: >>> df == 100 cost revenue A False True B False False C True False >>> df.eq(100) cost revenue A False True B False False C True False When `other` is a :class:`Series`, the columns of a DataFrame are aligned with the index of `other` and broadcast: >>> df != pd.Series([100, 250], index=["cost", "revenue"]) cost revenue A True True B True False C False True Use the method to control the broadcast axis: >>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis='index') cost revenue A True False B True True C True True D True True When comparing to an arbitrary sequence, the number of columns must match the number of elements in `other`: >>> df == [250, 100] cost revenue A True True B False False C False False Use the method to control the axis: >>> df.eq([250, 250, 100], axis='index') cost revenue A True False B False True C True False Compare to a DataFrame of different shape. >>> other = pd.DataFrame({{'revenue': [300, 250, 100, 150]}}, ... index=['A', 'B', 'C', 'D']) >>> other revenue A 300 B 250 C 100 D 150 >>> df.gt(other) cost revenue A False False B False False C False True D False False Compare to a MultiIndex by level. >>> df_multindex = pd.DataFrame({{'cost': [250, 150, 100, 150, 300, 220], ... 'revenue': [100, 250, 300, 200, 175, 225]}}, ... index=[['Q1', 'Q1', 'Q1', 'Q2', 'Q2', 'Q2'], ... 
['A', 'B', 'C', 'A', 'B', 'C']]) >>> df_multindex cost revenue Q1 A 250 100 B 150 250 C 100 300 Q2 A 150 200 B 300 175 C 220 225 >>> df.le(df_multindex, level=1) cost revenue Q1 A True True B True True C True True Q2 A False True B True False C True False """ def _make_flex_doc(op_name, typ): """ Make the appropriate substitutions for the given operation and class-typ into either _flex_doc_SERIES or _flex_doc_FRAME to return the docstring to attach to a generated method. Parameters ---------- op_name : str {'__add__', '__sub__', ... '__eq__', '__ne__', ...} typ : str {'series', 'dataframe'} Returns ------- doc : str """ op_name = op_name.replace('__', '') op_desc = _op_descriptions[op_name] if op_name.startswith('r'): equiv = 'other ' + op_desc['op'] + ' ' + typ else: equiv = typ + ' ' + op_desc['op'] + ' other' if typ == 'series': base_doc = _flex_doc_SERIES doc_no_examples = base_doc.format( desc=op_desc['desc'], op_name=op_name, equiv=equiv, reverse=op_desc['reverse'] ) if op_desc['series_examples']: doc = doc_no_examples + op_desc['series_examples'] else: doc = doc_no_examples elif typ == 'dataframe': base_doc = _flex_doc_FRAME doc = base_doc.format( desc=op_desc['desc'], op_name=op_name, equiv=equiv, reverse=op_desc['reverse'] ) else: raise AssertionError('Invalid typ argument.') return doc # ----------------------------------------------------------------------------- # Masking NA values and fallbacks for operations numpy does not support def fill_binop(left, right, fill_value): """ If a non-None fill_value is given, replace null entries in left and right with this value, but only in positions where _one_ of left/right is null, not both. Parameters ---------- left : array-like right : array-like fill_value : object Returns ------- left : array-like right : array-like Notes ----- Makes copies if fill_value is not None """ # TODO: can we make a no-copy implementation? if fill_value is not None: left_mask = isna(left) right_mask = isna(right) left = left.copy() right = right.copy() # one but not both mask = left_mask ^ right_mask left[left_mask & mask] = fill_value right[right_mask & mask] = fill_value return left, right def mask_cmp_op(x, y, op): """ Apply the function `op` to only non-null points in x and y. Parameters ---------- x : array-like y : array-like op : binary operation Returns ------- result : ndarray[bool] """ xrav = x.ravel() result = np.empty(x.size, dtype=bool) if isinstance(y, (np.ndarray, ABCSeries)): yrav = y.ravel() mask = notna(xrav) & notna(yrav) result[mask] = op(np.array(list(xrav[mask])), np.array(list(yrav[mask]))) else: mask = notna(xrav) result[mask] = op(np.array(list(xrav[mask])), y) if op == operator.ne: # pragma: no cover np.putmask(result, ~mask, True) else: np.putmask(result, ~mask, False) result = result.reshape(x.shape) return result def masked_arith_op(x, y, op): """ If the given arithmetic operation fails, attempt it again on only the non-null elements of the input array(s). Parameters ---------- x : np.ndarray y : np.ndarray, Series, Index op : binary operator """ # For Series `x` is 1D so ravel() is a no-op; calling it anyway makes # the logic valid for both Series and DataFrame ops. xrav = x.ravel() assert isinstance(x, (np.ndarray, ABCSeries)), type(x) if isinstance(y, (np.ndarray, ABCSeries, ABCIndexClass)): dtype = find_common_type([x.dtype, y.dtype]) result = np.empty(x.size, dtype=dtype) # PeriodIndex.ravel() returns int64 dtype, so we have # to work around that case. 
See GH#19956 yrav = y if is_period_dtype(y) else y.ravel() mask = notna(xrav) & notna(yrav) if yrav.shape != mask.shape: # FIXME: GH#5284, GH#5035, GH#19448 # Without specifically raising here we get mismatched # errors in Py3 (TypeError) vs Py2 (ValueError) # Note: only an issue in the DataFrame case raise ValueError('Cannot broadcast operands together.') if mask.any(): with np.errstate(all='ignore'): result[mask] = op(xrav[mask], com.values_from_object(yrav[mask])) else: assert is_scalar(y), type(y) assert isinstance(x, np.ndarray), type(x) # mask is only meaningful for x result = np.empty(x.size, dtype=x.dtype) mask = notna(xrav) # 1 ** np.nan is 1. So we have to unmask those. if op == pow: mask = np.where(x == 1, False, mask) elif op == rpow: mask = np.where(y == 1, False, mask) if mask.any(): with np.errstate(all='ignore'): result[mask] = op(xrav[mask], y) result, changed = maybe_upcast_putmask(result, ~mask, np.nan) result = result.reshape(x.shape) # 2D compat return result def invalid_comparison(left, right, op): """ If a comparison has mismatched types and is not necessarily meaningful, follow python3 conventions by: - returning all-False for equality - returning all-True for inequality - raising TypeError otherwise Parameters ---------- left : array-like right : scalar, array-like op : operator.{eq, ne, lt, le, gt, ge} Raises ------ TypeError : on inequality comparisons """ if op is operator.eq: res_values = np.zeros(left.shape, dtype=bool) elif op is operator.ne: res_values = np.ones(left.shape, dtype=bool) else: raise TypeError("Invalid comparison between dtype={dtype} and {typ}" .format(dtype=left.dtype, typ=type(right).__name__)) return res_values # ----------------------------------------------------------------------------- # Dispatch logic def should_series_dispatch(left, right, op): """ Identify cases where a DataFrame operation should dispatch to its Series counterpart. Parameters ---------- left : DataFrame right : DataFrame op : binary operator Returns ------- override : bool """ if left._is_mixed_type or right._is_mixed_type: return True if not len(left.columns) or not len(right.columns): # ensure obj.dtypes[0] exists for each obj return False ldtype = left.dtypes.iloc[0] rdtype = right.dtypes.iloc[0] if ((is_timedelta64_dtype(ldtype) and is_integer_dtype(rdtype)) or (is_timedelta64_dtype(rdtype) and is_integer_dtype(ldtype))): # numpy integer dtypes as timedelta64 dtypes in this scenario return True if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype): # in particular case where right is an array of DateOffsets return True return False def dispatch_to_series(left, right, func, str_rep=None, axis=None): """ Evaluate the frame operation func(left, right) by evaluating column-by-column, dispatching to the Series implementation. Parameters ---------- left : DataFrame right : scalar or DataFrame func : arithmetic or comparison operator str_rep : str or None, default None axis : {None, 0, 1, "index", "columns"} Returns ------- DataFrame """ # Note: we use iloc to access columns for compat with cases # with non-unique columns. 
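# Added illustration (editorial note, not original logic): with a scalar
# `right`, the column_op defined below builds a dict of per-column
# results, roughly
#     {0: func(left.iloc[:, 0], right),
#      1: func(left.iloc[:, 1], right),
#      ...}
# which is then reassembled into a DataFrame carrying left's index and
# columns.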
import pandas.core.computation.expressions as expressions right = lib.item_from_zerodim(right) if lib.is_scalar(right) or np.ndim(right) == 0: def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} elif isinstance(right, ABCDataFrame): assert right._indexed_same(left) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[:, i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries) and axis == "columns": # We only get here if called via left._combine_match_columns, # in which case we specifically want to operate row-by-row assert right.index.equals(left.columns) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries): assert right.index.equals(left.index) # Handle other cases later def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} else: # Remaining cases have less-obvious dispatch rules raise NotImplementedError(right) new_data = expressions.evaluate(column_op, str_rep, left, right) result = left._constructor(new_data, index=left.index, copy=False) # Pin columns instead of passing to constructor for compat with # non-unique columns case result.columns = left.columns return result def dispatch_to_index_op(op, left, right, index_class): """ Wrap Series left in the given index_class to delegate the operation op to the index implementation. DatetimeIndex and TimedeltaIndex perform type checking, timezone handling, overflow checks, etc. Parameters ---------- op : binary operator (operator.add, operator.sub, ...) left : Series right : object index_class : DatetimeIndex or TimedeltaIndex Returns ------- result : object, usually DatetimeIndex, TimedeltaIndex, or Series """ left_idx = index_class(left) # avoid accidentally allowing integer add/sub. For datetime64[tz] dtypes, # left_idx may inherit a freq from a cached DatetimeIndex. # See discussion in GH#19147. if getattr(left_idx, 'freq', None) is not None: left_idx = left_idx._shallow_copy(freq=None) try: result = op(left_idx, right) except NullFrequencyError: # DatetimeIndex and TimedeltaIndex with freq == None raise ValueError # on add/sub of integers (or int-like). We re-raise as a TypeError. raise TypeError('incompatible type for a datetime/timedelta ' 'operation [{name}]'.format(name=op.__name__)) return result def dispatch_to_extension_op(op, left, right): """ Assume that left or right is a Series backed by an ExtensionArray, apply the operator defined by op. """ # The op calls will raise TypeError if the op is not defined # on the ExtensionArray # unbox Series and Index to arrays if isinstance(left, (ABCSeries, ABCIndexClass)): new_left = left._values else: new_left = left if isinstance(right, (ABCSeries, ABCIndexClass)): new_right = right._values else: new_right = right res_values = op(new_left, new_right) res_name = get_op_result_name(left, right) if op.__name__ in ['divmod', 'rdivmod']: return _construct_divmod_result( left, res_values, left.index, res_name) return _construct_result(left, res_values, left.index, res_name) # ----------------------------------------------------------------------------- # Functions that add arithmetic methods to objects, given arithmetic factory # methods def _get_method_wrappers(cls): """ Find the appropriate operation-wrappers to use when defining flex/special arithmetic, boolean, and comparison operations with the given class. 
Parameters ---------- cls : class Returns ------- arith_flex : function or None comp_flex : function or None arith_special : function comp_special : function bool_special : function Notes ----- None is only returned for SparseArray """ if issubclass(cls, ABCSparseSeries): # Be sure to catch this before ABCSeries and ABCSparseArray, # as they will both come see SparseSeries as a subclass arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SPARSE_SERIES comp_special = _arith_method_SPARSE_SERIES bool_special = _bool_method_SERIES # TODO: I don't think the functions defined by bool_method are tested elif issubclass(cls, ABCSeries): # Just Series; SparseSeries is caught above arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SERIES comp_special = _comp_method_SERIES bool_special = _bool_method_SERIES elif issubclass(cls, ABCSparseArray): arith_flex = None comp_flex = None arith_special = _arith_method_SPARSE_ARRAY comp_special = _arith_method_SPARSE_ARRAY bool_special = _arith_method_SPARSE_ARRAY elif issubclass(cls, ABCDataFrame): # Same for DataFrame and SparseDataFrame arith_flex = _arith_method_FRAME comp_flex = _flex_comp_method_FRAME arith_special = _arith_method_FRAME comp_special = _comp_method_FRAME bool_special = _arith_method_FRAME return arith_flex, comp_flex, arith_special, comp_special, bool_special def _create_methods(cls, arith_method, comp_method, bool_method, special): # creates actual methods based upon arithmetic, comp and bool method # constructors. have_divmod = issubclass(cls, ABCSeries) # divmod is available for Series and SparseSeries # yapf: disable new_methods = dict( add=arith_method(cls, operator.add, special), radd=arith_method(cls, radd, special), sub=arith_method(cls, operator.sub, special), mul=arith_method(cls, operator.mul, special), truediv=arith_method(cls, operator.truediv, special), floordiv=arith_method(cls, operator.floordiv, special), # Causes a floating point exception in the tests when numexpr enabled, # so for now no speedup mod=arith_method(cls, operator.mod, special), pow=arith_method(cls, operator.pow, special), # not entirely sure why this is necessary, but previously was included # so it's here to maintain compatibility rmul=arith_method(cls, rmul, special), rsub=arith_method(cls, rsub, special), rtruediv=arith_method(cls, rtruediv, special), rfloordiv=arith_method(cls, rfloordiv, special), rpow=arith_method(cls, rpow, special), rmod=arith_method(cls, rmod, special)) # yapf: enable new_methods['div'] = new_methods['truediv'] new_methods['rdiv'] = new_methods['rtruediv'] if have_divmod: # divmod doesn't have an op that is supported by numexpr new_methods['divmod'] = arith_method(cls, divmod, special) new_methods['rdivmod'] = arith_method(cls, rdivmod, special) new_methods.update(dict( eq=comp_method(cls, operator.eq, special), ne=comp_method(cls, operator.ne, special), lt=comp_method(cls, operator.lt, special), gt=comp_method(cls, operator.gt, special), le=comp_method(cls, operator.le, special), ge=comp_method(cls, operator.ge, special))) if bool_method: new_methods.update( dict(and_=bool_method(cls, operator.and_, special), or_=bool_method(cls, operator.or_, special), # For some reason ``^`` wasn't used in original. 
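# Added note: rand_, ror_ and rxor are the reflected variants defined
# earlier in this module (alongside radd, rsub, ...), so reversed
# boolean expressions resolve through the same wrapper machinery.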
xor=bool_method(cls, operator.xor, special), rand_=bool_method(cls, rand_, special), ror_=bool_method(cls, ror_, special), rxor=bool_method(cls, rxor, special))) if special: dunderize = lambda x: '__{name}__'.format(name=x.strip('_')) else: dunderize = lambda x: x new_methods = {dunderize(k): v for k, v in new_methods.items()} return new_methods def add_methods(cls, new_methods): for name, method in new_methods.items(): # For most methods, if we find that the class already has a method # of the same name, it is OK to over-write it. The exception is # inplace methods (__iadd__, __isub__, ...) for SparseArray, which # retain the np.ndarray versions. force = not (issubclass(cls, ABCSparseArray) and name.startswith('__i')) if force or name not in cls.__dict__: setattr(cls, name, method) # ---------------------------------------------------------------------- # Arithmetic def add_special_arithmetic_methods(cls): """ Adds the full suite of special arithmetic methods (``__add__``, ``__sub__``, etc.) to the class. Parameters ---------- cls : class special methods will be defined and pinned to this class """ _, _, arith_method, comp_method, bool_method = _get_method_wrappers(cls) new_methods = _create_methods(cls, arith_method, comp_method, bool_method, special=True) # inplace operators (I feel like these should get passed an `inplace=True` # or just be removed def _wrap_inplace_method(method): """ return an inplace wrapper for this method """ def f(self, other): result = method(self, other) # this makes sure that we are aligned like the input # we are updating inplace so we want to ignore is_copy self._update_inplace(result.reindex_like(self, copy=False)._data, verify_is_copy=False) return self f.__name__ = "__i{name}__".format(name=method.__name__.strip("__")) return f new_methods.update( dict(__iadd__=_wrap_inplace_method(new_methods["__add__"]), __isub__=_wrap_inplace_method(new_methods["__sub__"]), __imul__=_wrap_inplace_method(new_methods["__mul__"]), __itruediv__=_wrap_inplace_method(new_methods["__truediv__"]), __ifloordiv__=_wrap_inplace_method(new_methods["__floordiv__"]), __imod__=_wrap_inplace_method(new_methods["__mod__"]), __ipow__=_wrap_inplace_method(new_methods["__pow__"]))) new_methods.update( dict(__iand__=_wrap_inplace_method(new_methods["__and__"]), __ior__=_wrap_inplace_method(new_methods["__or__"]), __ixor__=_wrap_inplace_method(new_methods["__xor__"]))) add_methods(cls, new_methods=new_methods) def add_flex_arithmetic_methods(cls): """ Adds the full suite of flex arithmetic methods (``pow``, ``mul``, ``add``) to the class. 
Parameters ---------- cls : class flex methods will be defined and pinned to this class """ flex_arith_method, flex_comp_method, _, _, _ = _get_method_wrappers(cls) new_methods = _create_methods(cls, flex_arith_method, flex_comp_method, bool_method=None, special=False) new_methods.update(dict(multiply=new_methods['mul'], subtract=new_methods['sub'], divide=new_methods['div'])) # opt out of bool flex methods for now assert not any(kname in new_methods for kname in ('ror_', 'rxor', 'rand_')) add_methods(cls, new_methods=new_methods) # ----------------------------------------------------------------------------- # Series def _align_method_SERIES(left, right, align_asobject=False): """ align lhs and rhs Series """ # ToDo: Different from _align_method_FRAME, list, tuple and ndarray # are not coerced here # because Series has inconsistencies described in #13637 if isinstance(right, ABCSeries): # avoid repeated alignment if not left.index.equals(right.index): if align_asobject: # to keep original value's dtype for bool ops left = left.astype(object) right = right.astype(object) left, right = left.align(right, copy=False) return left, right def _construct_result(left, result, index, name, dtype=None): """ If the raw op result has a non-None name (e.g. it is an Index object) and the name argument is None, then passing name to the constructor will not be enough; we still need to override the name attribute. """ out = left._constructor(result, index=index, dtype=dtype) out = out.__finalize__(left) out.name = name return out def _construct_divmod_result(left, result, index, name, dtype=None): """divmod returns a tuple of like indexed series instead of a single series. """ return ( _construct_result(left, result[0], index=index, name=name, dtype=dtype), _construct_result(left, result[1], index=index, name=name, dtype=dtype), ) def _arith_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) eval_kwargs = _gen_eval_kwargs(op_name) fill_zeros = _gen_fill_zeros(op_name) construct_result = (_construct_divmod_result if op in [divmod, rdivmod] else _construct_result) def na_op(x, y): """ Return the result of evaluating op on the passed in values. If native types are not compatible, try coercion to object dtype. Parameters ---------- x : array-like y : array-like or scalar Returns ------- array-like Raises ------ TypeError : invalid operation """ import pandas.core.computation.expressions as expressions try: result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) except TypeError: result = masked_arith_op(x, y, op) except Exception: # TODO: more specific? if is_object_dtype(x): return libalgos.arrmap_object(x, lambda val: op(val, y)) raise result = missing.fill_zeros(result, x, y, op_name, fill_zeros) return result def wrapper(left, right): if isinstance(right, ABCDataFrame): return NotImplemented left, right = _align_method_SERIES(left, right) res_name = get_op_result_name(left, right) right = maybe_upcast_for_op(right) if is_categorical_dtype(left): raise TypeError("{typ} cannot perform the operation " "{op}".format(typ=type(left).__name__, op=str_rep)) elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left): # Give dispatch_to_index_op a chance for tests like # test_dt64_series_add_intlike, which the index dispatching handles # specifically. 
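# Added note: for datetime64 data, the dispatch below is roughly
# equivalent to op(pd.DatetimeIndex(left), right) followed by
# re-boxing into a Series; the Index layer supplies the type,
# timezone, and overflow checks.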
result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex) return construct_result(left, result, index=left.index, name=res_name, dtype=result.dtype) elif (is_extension_array_dtype(left) or (is_extension_array_dtype(right) and not is_scalar(right))): # GH#22378 disallow scalar to exclude e.g. "category", "Int64" return dispatch_to_extension_op(op, left, right) elif is_timedelta64_dtype(left): result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex) return construct_result(left, result, index=left.index, name=res_name) elif is_timedelta64_dtype(right): # We should only get here with non-scalar or timedelta64('NaT') # values for right # Note: we cannot use dispatch_to_index_op because # that may incorrectly raise TypeError when we # should get NullFrequencyError result = op(pd.Index(left), right) return construct_result(left, result, index=left.index, name=res_name, dtype=result.dtype) lvalues = left.values rvalues = right if isinstance(rvalues, ABCSeries): rvalues = rvalues.values with np.errstate(all='ignore'): result = na_op(lvalues, rvalues) return construct_result(left, result, index=left.index, name=res_name, dtype=None) wrapper.__name__ = op_name return wrapper def _comp_method_OBJECT_ARRAY(op, x, y): if isinstance(y, list): y = construct_1d_object_array_from_listlike(y) if isinstance(y, (np.ndarray, ABCSeries, ABCIndex)): if not is_object_dtype(y.dtype): y = y.astype(np.object_) if isinstance(y, (ABCSeries, ABCIndex)): y = y.values result = libops.vec_compare(x, y, op) else: result = libops.scalar_compare(x, y, op) return result def _comp_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) masker = _gen_eval_kwargs(op_name).get('masker', False) def na_op(x, y): # TODO: # should have guarantees on what x, y can be type-wise # Extension Dtypes are not called here # Checking that cases that were once handled here are no longer # reachable. assert not (is_categorical_dtype(y) and not is_scalar(y)) if is_object_dtype(x.dtype): result = _comp_method_OBJECT_ARRAY(op, x, y) elif is_datetimelike_v_numeric(x, y): return invalid_comparison(x, y, op) else: # we want to compare like types # we only want to convert to integer like if # we are not NotImplemented, otherwise # we would allow datetime64 (but viewed as i8) against # integer comparisons # we have a datetime/timedelta and may need to convert assert not needs_i8_conversion(x) mask = None if not is_scalar(y) and needs_i8_conversion(y): mask = isna(x) | isna(y) y = y.view('i8') x = x.view('i8') method = getattr(x, op_name, None) if method is not None: with np.errstate(all='ignore'): result = method(y) if result is NotImplemented: return invalid_comparison(x, y, op) else: result = op(x, y) if mask is not None and mask.any(): result[mask] = masker return result def wrapper(self, other, axis=None): # Validate the axis parameter if axis is not None: self._get_axis_number(axis) res_name = get_op_result_name(self, other) if isinstance(other, list): # TODO: same for tuples? 
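# Added note: coercing here means a comparison such as
# pd.Series([1, 2]) == [1, 2] falls through to the ndarray branch
# below, which enforces the length check before comparing
# element-wise.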
other = np.asarray(other) if isinstance(other, ABCDataFrame): # pragma: no cover # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, ABCSeries) and not self._indexed_same(other): raise ValueError("Can only compare identically-labeled " "Series objects") elif is_categorical_dtype(self): # Dispatch to Categorical implementation; pd.CategoricalIndex # behavior is non-canonical GH#19513 res_values = dispatch_to_index_op(op, self, other, pd.Categorical) return self._constructor(res_values, index=self.index, name=res_name) elif is_datetime64_dtype(self) or is_datetime64tz_dtype(self): # Dispatch to DatetimeIndex to ensure identical # Series/Index behavior if (isinstance(other, datetime.date) and not isinstance(other, datetime.datetime)): # https://github.com/pandas-dev/pandas/issues/21152 # Compatibility for difference between Series comparison w/ # datetime and date msg = ( "Comparing Series of datetimes with 'datetime.date'. " "Currently, the 'datetime.date' is coerced to a " "datetime. In the future pandas will not coerce, " "and {future}. " "To retain the current behavior, " "convert the 'datetime.date' to a datetime with " "'pd.Timestamp'." ) if op in {operator.lt, operator.le, operator.gt, operator.ge}: future = "a TypeError will be raised" else: future = ( "the values will not compare equal to the " "'datetime.date'" ) msg = '\n'.join(textwrap.wrap(msg.format(future=future))) warnings.warn(msg, FutureWarning, stacklevel=2) other = pd.Timestamp(other) res_values = dispatch_to_index_op(op, self, other, pd.DatetimeIndex) return self._constructor(res_values, index=self.index, name=res_name) elif is_timedelta64_dtype(self): res_values = dispatch_to_index_op(op, self, other, pd.TimedeltaIndex) return self._constructor(res_values, index=self.index, name=res_name) elif (is_extension_array_dtype(self) or (is_extension_array_dtype(other) and not is_scalar(other))): # Note: the `not is_scalar(other)` condition rules out # e.g. other == "category" return dispatch_to_extension_op(op, self, other) elif isinstance(other, ABCSeries): # By this point we have checked that self._indexed_same(other) res_values = na_op(self.values, other.values) # rename is needed in case res_name is None and res_values.name # is not. return self._constructor(res_values, index=self.index, name=res_name).rename(res_name) elif isinstance(other, (np.ndarray, pd.Index)): # do not check length of zerodim array # as it will broadcast if other.ndim != 0 and len(self) != len(other): raise ValueError('Lengths must match to compare') res_values = na_op(self.values, np.asarray(other)) result = self._constructor(res_values, index=self.index) # rename is needed in case res_name is None and self.name # is not. 
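# Added note: __finalize__ copies metadata (including .name) from
# `self`, so the trailing .rename(res_name) re-applies the computed
# result name, e.g. None when comparing against an unnamed ndarray.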
return result.__finalize__(self).rename(res_name) elif is_scalar(other) and isna(other): # numpy does not like comparisons vs None if op is operator.ne: res_values = np.ones(len(self), dtype=bool) else: res_values = np.zeros(len(self), dtype=bool) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') else: values = self.get_values() with np.errstate(all='ignore'): res = na_op(values, other) if is_scalar(res): raise TypeError('Could not compare {typ} type with Series' .format(typ=type(other))) # always return a full value series here res_values = com.values_from_object(res) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') wrapper.__name__ = op_name return wrapper def _bool_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def na_op(x, y): try: result = op(x, y) except TypeError: assert not isinstance(y, (list, ABCSeries, ABCIndexClass)) if isinstance(y, np.ndarray): # bool-bool dtype operations should be OK, should not get here assert not (is_bool_dtype(x) and is_bool_dtype(y)) x = ensure_object(x) y = ensure_object(y) result = libops.vec_binop(x, y, op) else: # let null fall thru assert lib.is_scalar(y) if not isna(y): y = bool(y) try: result = libops.scalar_binop(x, y, op) except (TypeError, ValueError, AttributeError, OverflowError, NotImplementedError): raise TypeError("cannot compare a dtyped [{dtype}] array " "with a scalar of type [{typ}]" .format(dtype=x.dtype, typ=type(y).__name__)) return result fill_int = lambda x: x.fillna(0) fill_bool = lambda x: x.fillna(False).astype(bool) def wrapper(self, other): is_self_int_dtype = is_integer_dtype(self.dtype) self, other = _align_method_SERIES(self, other, align_asobject=True) res_name = get_op_result_name(self, other) if isinstance(other, ABCDataFrame): # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, (ABCSeries, ABCIndexClass)): is_other_int_dtype = is_integer_dtype(other.dtype) other = fill_int(other) if is_other_int_dtype else fill_bool(other) ovalues = other.values finalizer = lambda x: x else: # scalars, list, tuple, np.array is_other_int_dtype = is_integer_dtype(np.asarray(other)) if is_list_like(other) and not isinstance(other, np.ndarray): # TODO: Can we do this before the is_integer_dtype check? # could the is_integer_dtype check be checking the wrong # thing? e.g. other = [[0, 1], [2, 3], [4, 5]]? other = construct_1d_object_array_from_listlike(other) ovalues = other finalizer = lambda x: x.__finalize__(self) # For int vs int `^`, `|`, `&` are bitwise operators and return # integer dtypes. 
Otherwise these are boolean ops filler = (fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool) res_values = na_op(self.values, ovalues) unfilled = self._constructor(res_values, index=self.index, name=res_name) filled = filler(unfilled) return finalizer(filled) wrapper.__name__ = op_name return wrapper def _flex_method_SERIES(cls, op, special): name = _get_op_name(op, special) doc = _make_flex_doc(name, 'series') @Appender(doc) def flex_wrapper(self, other, level=None, fill_value=None, axis=0): # validate axis if axis is not None: self._get_axis_number(axis) if isinstance(other, ABCSeries): return self._binop(other, op, level=level, fill_value=fill_value) elif isinstance(other, (np.ndarray, list, tuple)): if len(other) != len(self): raise ValueError('Lengths must be equal') other = self._constructor(other, self.index) return self._binop(other, op, level=level, fill_value=fill_value) else: if fill_value is not None: self = self.fillna(fill_value) return self._constructor(op(self, other), self.index).__finalize__(self) flex_wrapper.__name__ = name return flex_wrapper # ----------------------------------------------------------------------------- # DataFrame def _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None): """ Apply binary operator `func` to self, other using alignment and fill conventions determined by the fill_value, axis, and level kwargs. Parameters ---------- self : DataFrame other : Series func : binary operator fill_value : object, default None axis : {0, 1, 'columns', 'index', None}, default None level : int or None, default None Returns ------- result : DataFrame """ if fill_value is not None: raise NotImplementedError("fill_value {fill} not supported." .format(fill=fill_value)) if axis is not None: axis = self._get_axis_number(axis) if axis == 0: return self._combine_match_index(other, func, level=level) else: return self._combine_match_columns(other, func, level=level) else: if not len(other): return self * np.nan if not len(self): # Ambiguous case, use _series so works with DataFrame return self._constructor(data=self._series, index=self.index, columns=self.columns) # default axis is columns return self._combine_match_columns(other, func, level=level) def _align_method_FRAME(left, right, axis): """ convert rhs to meet lhs dims if input is list, tuple or np.ndarray """ def to_series(right): msg = ('Unable to coerce to Series, length must be {req_len}: ' 'given {given_len}') if axis is not None and left._get_axis_name(axis) == 'index': if len(left.index) != len(right): raise ValueError(msg.format(req_len=len(left.index), given_len=len(right))) right = left._constructor_sliced(right, index=left.index) else: if len(left.columns) != len(right): raise ValueError(msg.format(req_len=len(left.columns), given_len=len(right))) right = left._constructor_sliced(right, index=left.columns) return right if isinstance(right, np.ndarray): if right.ndim == 1: right = to_series(right) elif right.ndim == 2: if right.shape == left.shape: right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[0] == left.shape[0] and right.shape[1] == 1: # Broadcast across columns right = np.broadcast_to(right, left.shape) right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[1] == left.shape[1] and right.shape[0] == 1: # Broadcast along rows right = to_series(right[0, :]) else: raise ValueError("Unable to coerce to DataFrame, shape " "must be {req_shape}: given {given_shape}" 
.format(req_shape=left.shape, given_shape=right.shape)) elif right.ndim > 2: raise ValueError('Unable to coerce to Series/DataFrame, dim ' 'must be <= 2: {dim}'.format(dim=right.shape)) elif (is_list_like(right) and not isinstance(right, (ABCSeries, ABCDataFrame))): # GH17901 right = to_series(right) return right def _arith_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) eval_kwargs = _gen_eval_kwargs(op_name) fill_zeros = _gen_fill_zeros(op_name) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): import pandas.core.computation.expressions as expressions try: result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) except TypeError: result = masked_arith_op(x, y, op) result = missing.fill_zeros(result, x, y, op_name, fill_zeros) return result if op_name in _op_descriptions: # i.e. include "add" but not "__add__" doc = _make_flex_doc(op_name, 'dataframe') else: doc = _arith_doc_FRAME % op_name @Appender(doc) def f(self, other, axis=default_axis, level=None, fill_value=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame pass_op = op if should_series_dispatch(self, other, op) else na_op return self._combine_frame(other, pass_op, fill_value, level) elif isinstance(other, ABCSeries): # For these values of `axis`, we end up dispatching to Series op, # so do not want the masked op. pass_op = op if axis in [0, "columns", None] else na_op return _combine_series_frame(self, other, pass_op, fill_value=fill_value, axis=axis, level=level) else: if fill_value is not None: self = self.fillna(fill_value) assert np.ndim(other) == 0 return self._combine_const(other, op) f.__name__ = op_name return f def _flex_comp_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): try: with np.errstate(invalid='ignore'): result = op(x, y) except TypeError: result = mask_cmp_op(x, y, op) return result doc = _flex_comp_doc_FRAME.format(op_name=op_name, desc=_op_descriptions[op_name]['desc']) @Appender(doc) def f(self, other, axis=default_axis, level=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): self, other = self.align(other, 'outer', level=level, copy=False) return dispatch_to_series(self, other, na_op, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, na_op, fill_value=None, axis=axis, level=level) else: assert np.ndim(other) == 0, other return self._combine_const(other, na_op) f.__name__ = op_name return f def _comp_method_FRAME(cls, func, special): str_rep = _get_opstr(func, cls) op_name = _get_op_name(func, special) @Appender('Wrapper for comparison method {name}'.format(name=op_name)) def f(self, other): other = _align_method_FRAME(self, other, axis=None) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): raise ValueError('Can only compare identically-labeled ' 'DataFrame objects') return dispatch_to_series(self, other, func, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None) else: # straight boolean comparisons we want to allow all columns # (regardless of dtype to pass thru) See #4537 for discussion. 
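# Added illustration: e.g. `df == 'foo'` on a mixed-dtype frame
# compares every column against the scalar; entries where the
# comparison is undefined come back missing and are coerced to bool
# by the fillna/astype chain below.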
res = self._combine_const(other, func) return res.fillna(True).astype(bool) f.__name__ = op_name return f # ----------------------------------------------------------------------------- # Sparse def _cast_sparse_series_op(left, right, opname): """ For SparseSeries operation, coerce to float64 if the result is expected to have NaN or inf values Parameters ---------- left : SparseArray right : SparseArray opname : str Returns ------- left : SparseArray right : SparseArray """ from pandas.core.sparse.api import SparseDtype opname = opname.strip('_') # TODO: This should be moved to the array? if is_integer_dtype(left) and is_integer_dtype(right): # series coerces to float64 if result should have NaN/inf if opname in ('floordiv', 'mod') and (right.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) elif opname in ('rfloordiv', 'rmod') and (left.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) return left, right def _arith_method_SPARSE_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): if isinstance(other, ABCDataFrame): return NotImplemented elif isinstance(other, ABCSeries): if not isinstance(other, ABCSparseSeries): other = other.to_sparse(fill_value=self.fill_value) return _sparse_series_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): new_values = op(self.values, other) return self._constructor(new_values, index=self.index, name=self.name) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper def _sparse_series_op(left, right, op, name): left, right = left.align(right, join='outer', copy=False) new_index = left.index new_name = get_op_result_name(left, right) from pandas.core.arrays.sparse import _sparse_array_op lvalues, rvalues = _cast_sparse_series_op(left.values, right.values, name) result = _sparse_array_op(lvalues, rvalues, op, name) return left._constructor(result, index=new_index, name=new_name) def _arith_method_SPARSE_ARRAY(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): from pandas.core.arrays.sparse.array import ( SparseArray, _sparse_array_op, _wrap_result, _get_fill) if isinstance(other, np.ndarray): if len(self) != len(other): raise AssertionError("length mismatch: {self} vs. {other}" .format(self=len(self), other=len(other))) if not isinstance(other, SparseArray): dtype = getattr(other, 'dtype', None) other = SparseArray(other, fill_value=self.fill_value, dtype=dtype) return _sparse_array_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): fill = op(_get_fill(self), np.asarray(other)) result = op(self.sp_values, other) return _wrap_result(op_name, result, self.sp_index, fill) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper
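A short, hedged usage sketch of the factory machinery defined above (assumed environment: a pandas build produced from this module, where add_special_arithmetic_methods and add_flex_arithmetic_methods have already been applied to Series at import time):

import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, 3.0], index=['a', 'b', 'c'])

# Special method (built with special=True): plain operator syntax.
print(s + 1)

# Flex method (built with special=False): the same operation, but with
# the fill_value support documented in _flex_doc_SERIES.
print(s.add(1, fill_value=0))

# The generated names follow _get_op_name's convention.
assert pd.Series.__add__.__name__ == '__add__'
assert pd.Series.add.__name__ == 'add'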
import json import numpy as np import pytest from pandas import DataFrame, Index import pandas.util.testing as tm from pandas.io.json import json_normalize from pandas.io.json.normalize import nested_to_record @pytest.fixture def deep_nested(): # deeply nested data return [{'country': 'USA', 'states': [{'name': 'California', 'cities': [{'name': 'San Francisco', 'pop': 12345}, {'name': 'Los Angeles', 'pop': 12346}] }, {'name': 'Ohio', 'cities': [{'name': 'Columbus', 'pop': 1234}, {'name': 'Cleveland', 'pop': 1236}]} ] }, {'country': 'Germany', 'states': [{'name': 'Bayern', 'cities': [{'name': 'Munich', 'pop': 12347}] }, {'name': 'Nordrhein-Westfalen', 'cities': [{'name': 'Duesseldorf', 'pop': 1238}, {'name': 'Koeln', 'pop': 1239}]} ] } ] @pytest.fixture def state_data(): return [ {'counties': [{'name': 'Dade', 'population': 12345}, {'name': 'Broward', 'population': 40000}, {'name': 'Palm Beach', 'population': 60000}], 'info': {'governor': 'Rick Scott'}, 'shortname': 'FL', 'state': 'Florida'}, {'counties': [{'name': 'Summit', 'population': 1234}, {'name': 'Cuyahoga', 'population': 1337}], 'info': {'governor': 'John Kasich'}, 'shortname': 'OH', 'state': 'Ohio'}] @pytest.fixture def author_missing_data(): return [ {'info': None}, {'info': {'created_at': '11/08/1993', 'last_updated': '26/05/2012'}, 'author_name': {'first': 'Jane', 'last_name': 'Doe'} }] @pytest.fixture def missing_metadata(): return [ {'name': 'Alice', 'addresses': [{'number': 9562, 'street': 'Morris St.', 'city': 'Massillon', 'state': 'OH', 'zip': 44646}] }, {'addresses': [{'number': 8449, 'street': 'Spring St.', 'city': 'Elizabethton', 'state': 'TN', 'zip': 37643}] } ] class TestJSONNormalize: def test_simple_records(self): recs = [{'a': 1, 'b': 2, 'c': 3}, {'a': 4, 'b': 5, 'c': 6}, {'a': 7, 'b': 8, 'c': 9}, {'a': 10, 'b': 11, 'c': 12}] result = json_normalize(recs) expected = DataFrame(recs) tm.assert_frame_equal(result, expected) def test_simple_normalize(self, state_data): result = json_normalize(state_data[0], 'counties') expected = DataFrame(state_data[0]['counties']) tm.assert_frame_equal(result, expected) result = json_normalize(state_data, 'counties') expected = [] for rec in state_data: expected.extend(rec['counties']) expected = DataFrame(expected) tm.assert_frame_equal(result, expected) result = json_normalize(state_data, 'counties', meta='state') expected['state'] = np.array(['Florida', 'Ohio']).repeat([3, 2]) tm.assert_frame_equal(result, expected) def test_empty_array(self): result = json_normalize([]) expected = DataFrame() tm.assert_frame_equal(result, expected) def test_simple_normalize_with_separator(self, deep_nested): # GH 14883 result = json_normalize({'A': {'A': 1, 'B': 2}}) expected = DataFrame([[1, 2]], columns=['A.A', 'A.B']) tm.assert_frame_equal(result.reindex_like(expected), expected) result = json_normalize({'A': {'A': 1, 'B': 2}}, sep='_') expected = DataFrame([[1, 2]], columns=['A_A', 'A_B']) tm.assert_frame_equal(result.reindex_like(expected), expected) result = json_normalize({'A': {'A': 1, 'B': 2}}, sep='\u03c3') expected = DataFrame([[1, 2]], columns=['A\u03c3A', 'A\u03c3B']) tm.assert_frame_equal(result.reindex_like(expected), expected) result = json_normalize(deep_nested, ['states', 'cities'], meta=['country', ['states', 'name']], sep='_') expected = Index(['name', 'pop', 'country', 'states_name']).sort_values() assert result.columns.sort_values().equals(expected) def test_value_array_record_prefix(self): # GH 21536 result = json_normalize({'A': [1, 2]}, 'A', record_prefix='Prefix.') 
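# Added note: a bare list has no field names, so json_normalize emits
# a single positional column named '0'; record_prefix then turns it
# into 'Prefix.0', as the expected frame below shows.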
expected = DataFrame([[1], [2]], columns=['Prefix.0']) tm.assert_frame_equal(result, expected) def test_nested_object_record_path(self): # GH 22706 data = {'state': 'Florida', 'info': { 'governor': 'Rick Scott', 'counties': [{'name': 'Dade', 'population': 12345}, {'name': 'Broward', 'population': 40000}, {'name': 'Palm Beach', 'population': 60000}]}} result = json_normalize(data, record_path=["info", "counties"]) expected = DataFrame([['Dade', 12345], ['Broward', 40000], ['Palm Beach', 60000]], columns=['name', 'population']) tm.assert_frame_equal(result, expected) def test_more_deeply_nested(self, deep_nested): result = json_normalize(deep_nested, ['states', 'cities'], meta=['country', ['states', 'name']]) # meta_prefix={'states': 'state_'}) ex_data = {'country': ['USA'] * 4 + ['Germany'] * 3, 'states.name': ['California', 'California', 'Ohio', 'Ohio', 'Bayern', 'Nordrhein-Westfalen', 'Nordrhein-Westfalen'], 'name': ['San Francisco', 'Los Angeles', 'Columbus', 'Cleveland', 'Munich', 'Duesseldorf', 'Koeln'], 'pop': [12345, 12346, 1234, 1236, 12347, 1238, 1239]} expected = DataFrame(ex_data, columns=result.columns) tm.assert_frame_equal(result, expected) def test_shallow_nested(self): data = [{'state': 'Florida', 'shortname': 'FL', 'info': { 'governor': 'Rick Scott' }, 'counties': [{'name': 'Dade', 'population': 12345}, {'name': 'Broward', 'population': 40000}, {'name': 'Palm Beach', 'population': 60000}]}, {'state': 'Ohio', 'shortname': 'OH', 'info': { 'governor': 'John Kasich' }, 'counties': [{'name': 'Summit', 'population': 1234}, {'name': 'Cuyahoga', 'population': 1337}]}] result = json_normalize(data, 'counties', ['state', 'shortname', ['info', 'governor']]) ex_data = {'name': ['Dade', 'Broward', 'Palm Beach', 'Summit', 'Cuyahoga'], 'state': ['Florida'] * 3 + ['Ohio'] * 2, 'shortname': ['FL', 'FL', 'FL', 'OH', 'OH'], 'info.governor': ['Rick Scott'] * 3 + ['John Kasich'] * 2, 'population': [12345, 40000, 60000, 1234, 1337]} expected = DataFrame(ex_data, columns=result.columns) tm.assert_frame_equal(result, expected) def test_meta_name_conflict(self): data = [{'foo': 'hello', 'bar': 'there', 'data': [{'foo': 'something', 'bar': 'else'}, {'foo': 'something2', 'bar': 'else2'}]}] msg = (r"Conflicting metadata name (foo|bar)," " need distinguishing prefix") with pytest.raises(ValueError, match=msg): json_normalize(data, 'data', meta=['foo', 'bar']) result = json_normalize(data, 'data', meta=['foo', 'bar'], meta_prefix='meta') for val in ['metafoo', 'metabar', 'foo', 'bar']: assert val in result def test_meta_parameter_not_modified(self): # GH 18610 data = [{'foo': 'hello', 'bar': 'there', 'data': [{'foo': 'something', 'bar': 'else'}, {'foo': 'something2', 'bar': 'else2'}]}] COLUMNS = ['foo', 'bar'] result = json_normalize(data, 'data', meta=COLUMNS, meta_prefix='meta') assert COLUMNS == ['foo', 'bar'] for val in ['metafoo', 'metabar', 'foo', 'bar']: assert val in result def test_record_prefix(self, state_data): result = json_normalize(state_data[0], 'counties') expected = DataFrame(state_data[0]['counties']) tm.assert_frame_equal(result, expected) result = json_normalize(state_data, 'counties', meta='state', record_prefix='county_') expected = [] for rec in state_data: expected.extend(rec['counties']) expected = DataFrame(expected) expected = expected.rename(columns=lambda x: 'county_' + x) expected['state'] = np.array(['Florida', 'Ohio']).repeat([3, 2]) tm.assert_frame_equal(result, expected) def test_non_ascii_key(self): testjson = ( b'[{"\xc3\x9cnic\xc3\xb8de":0,"sub":{"A":1, "B":2}},' + 
b'{"\xc3\x9cnic\xc3\xb8de":1,"sub":{"A":3, "B":4}}]' ).decode('utf8') testdata = { 'sub.A': [1, 3], 'sub.B': [2, 4], b"\xc3\x9cnic\xc3\xb8de".decode('utf8'): [0, 1] } expected = DataFrame(testdata) result = json_normalize(json.loads(testjson)) tm.assert_frame_equal(result, expected) def test_missing_field(self, author_missing_data): # GH20030: result = json_normalize(author_missing_data) ex_data = [ {'info': np.nan, 'author_name.first': np.nan, 'author_name.last_name': np.nan, 'info.created_at': np.nan, 'info.last_updated': np.nan}, {'info': None, 'author_name.first': 'Jane', 'author_name.last_name': 'Doe', 'info.created_at': '11/08/1993', 'info.last_updated': '26/05/2012'} ] expected = DataFrame(ex_data) tm.assert_frame_equal(result, expected) class TestNestedToRecord: def test_flat_stays_flat(self): recs = [dict(flat1=1, flat2=2), dict(flat1=3, flat2=4), ] result = nested_to_record(recs) expected = recs assert result == expected def test_one_level_deep_flattens(self): data = dict(flat1=1, dict1=dict(c=1, d=2)) result = nested_to_record(data) expected = {'dict1.c': 1, 'dict1.d': 2, 'flat1': 1} assert result == expected def test_nested_flattens(self): data = dict(flat1=1, dict1=dict(c=1, d=2), nested=dict(e=dict(c=1, d=2), d=2)) result = nested_to_record(data) expected = {'dict1.c': 1, 'dict1.d': 2, 'flat1': 1, 'nested.d': 2, 'nested.e.c': 1, 'nested.e.d': 2} assert result == expected def test_json_normalize_errors(self, missing_metadata): # GH14583: # If meta keys are not always present a new option to set # errors='ignore' has been implemented msg = ("Try running with errors='ignore' as key 'name'" " is not always present") with pytest.raises(KeyError, match=msg): json_normalize( data=missing_metadata, record_path='addresses', meta='name', errors='raise') def test_missing_meta(self, missing_metadata): # GH25468 # If metadata is nullable with errors set to ignore, the null values # should be numpy.nan values result = json_normalize( data=missing_metadata, record_path='addresses', meta='name', errors='ignore') ex_data = [ {'city': 'Massillon', 'number': 9562, 'state': 'OH', 'street': 'Morris St.', 'zip': 44646, 'name': 'Alice'}, {'city': 'Elizabethton', 'number': 8449, 'state': 'TN', 'street': 'Spring St.', 'zip': 37643, 'name': np.nan} ] ex_data = [ ['Massillon', 9562, 'OH', 'Morris St.', 44646, 'Alice'], ['Elizabethton', 8449, 'TN', 'Spring St.', 37643, np.nan] ] columns = ['city', 'number', 'state', 'street', 'zip', 'name'] expected = DataFrame(ex_data, columns=columns) tm.assert_frame_equal(result, expected) def test_donot_drop_nonevalues(self): # GH21356 data = [ {'info': None, 'author_name': {'first': 'Smith', 'last_name': 'Appleseed'} }, {'info': {'created_at': '11/08/1993', 'last_updated': '26/05/2012'}, 'author_name': {'first': 'Jane', 'last_name': 'Doe'} } ] result = nested_to_record(data) expected = [ {'info': None, 'author_name.first': 'Smith', 'author_name.last_name': 'Appleseed'}, {'author_name.first': 'Jane', 'author_name.last_name': 'Doe', 'info.created_at': '11/08/1993', 'info.last_updated': '26/05/2012'}] assert result == expected def test_nonetype_top_level_bottom_level(self): # GH21158: If inner level json has a key with a null value # make sure it doesnt do a new_d.pop twice and except data = { "id": None, "location": { "country": { "state": { "id": None, "town.info": { "id": None, "region": None, "x": 49.151580810546875, "y": -33.148521423339844, "z": 27.572303771972656}}} } } result = nested_to_record(data) expected = { 'id': None, 'location.country.state.id': None, 
            'location.country.state.town.info.id': None,
            'location.country.state.town.info.region': None,
            'location.country.state.town.info.x': 49.151580810546875,
            'location.country.state.town.info.y': -33.148521423339844,
            'location.country.state.town.info.z': 27.572303771972656}

        assert result == expected

    def test_nonetype_multiple_levels(self):
        # GH21158: If inner level json has a key with a null value
        # make sure it does not call new_d.pop twice and raise a KeyError
        data = {
            "id": None,
            "location": {
                "id": None,
                "country": {
                    "id": None,
                    "state": {
                        "id": None,
                        "town.info": {
                            "region": None,
                            "x": 49.151580810546875,
                            "y": -33.148521423339844,
                            "z": 27.572303771972656}}}
            }
        }
        result = nested_to_record(data)
        expected = {
            'id': None,
            'location.id': None,
            'location.country.id': None,
            'location.country.state.id': None,
            'location.country.state.town.info.region': None,
            'location.country.state.town.info.x': 49.151580810546875,
            'location.country.state.town.info.y': -33.148521423339844,
            'location.country.state.town.info.z': 27.572303771972656}
        assert result == expected
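# Quick sketch (not part of the test suite) of the behaviour verified by the
# None-handling tests above: nested_to_record flattens nested dicts into
# dotted keys while leaving None values in place rather than dropping them.
from pandas.io.json.normalize import nested_to_record

_record = {'id': None, 'location': {'x': 1, 'y': None}}
print(nested_to_record(_record))
# {'id': None, 'location.x': 1, 'location.y': None}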
cbertinato/pandas
pandas/tests/io/json/test_normalize.py
pandas/core/ops.py
# ---------------------------------------------------------------------
# JSON normalization routines

import copy
from collections import defaultdict
import numpy as np

from pandas._libs.writers import convert_json_to_lines
from pandas import compat, DataFrame


def _convert_to_line_delimits(s):
    """Helper function that converts json lists to line delimited json."""

    # Determine if we have a JSON list to turn into lines; otherwise just
    # return the json object unchanged, since only lists can be converted
    if not (s[0] == '[' and s[-1] == ']'):
        return s
    s = s[1:-1]

    return convert_json_to_lines(s)


def nested_to_record(ds, prefix="", sep=".", level=0):
    """a simplified json_normalize

    converts a nested dict into a flat dict ("record"), unlike json_normalize,
    it does not attempt to extract a subset of the data.

    Parameters
    ----------
    ds : dict or list of dicts
    prefix: the prefix, optional, default: ""
    sep : string, default '.'
        Nested records will generate names separated by sep,
        e.g., for sep='.', { 'foo' : { 'bar' : 0 } } -> foo.bar

        .. versionadded:: 0.20.0

    level: the number of levels in the json string, optional, default: 0

    Returns
    -------
    d - dict or list of dicts, matching `ds`

    Examples
    --------

    IN[52]: nested_to_record(dict(flat1=1,dict1=dict(c=1,d=2),
                                  nested=dict(e=dict(c=1,d=2),d=2)))
    Out[52]:
    {'dict1.c': 1,
     'dict1.d': 2,
     'flat1': 1,
     'nested.d': 2,
     'nested.e.c': 1,
     'nested.e.d': 2}
    """
    singleton = False
    if isinstance(ds, dict):
        ds = [ds]
        singleton = True

    new_ds = []
    for d in ds:

        new_d = copy.deepcopy(d)
        for k, v in d.items():
            # each key gets renamed with prefix
            if not isinstance(k, compat.string_types):
                k = str(k)
            if level == 0:
                newkey = k
            else:
                newkey = prefix + sep + k

            # only dicts get recurse-flattened
            # only at level>1 do we rename the rest of the keys
            if not isinstance(v, dict):
                if level != 0:  # so we skip copying for top level, common case
                    v = new_d.pop(k)
                    new_d[newkey] = v
                continue
            else:
                v = new_d.pop(k)
                new_d.update(nested_to_record(v, newkey, sep, level + 1))
        new_ds.append(new_d)

    if singleton:
        return new_ds[0]
    return new_ds


def json_normalize(data, record_path=None, meta=None,
                   meta_prefix=None,
                   record_prefix=None,
                   errors='raise',
                   sep='.'):
    """
    "Normalize" semi-structured JSON data into a flat table

    Parameters
    ----------
    data : dict or list of dicts
        Unserialized JSON objects
    record_path : string or list of strings, default None
        Path in each object to list of records. If not passed, data will be
        assumed to be an array of records
    meta : list of paths (string or list of strings), default None
        Fields to use as metadata for each record in resulting table
    record_prefix : string, default None
        If True, prefix records with dotted path, e.g. foo.bar.field if
        path to records is ['foo', 'bar']
    meta_prefix : string, default None
        Prefix for the names of the metadata columns
    errors : {'raise', 'ignore'}, default 'raise'

        * 'ignore' : will ignore KeyError if keys listed in meta are not
          always present
        * 'raise' : will raise KeyError if keys listed in meta are not
          always present

        .. versionadded:: 0.20.0

    sep : string, default '.'
        Nested records will generate names separated by sep,
        e.g., for sep='.', { 'foo' : { 'bar' : 0 } } -> foo.bar

        .. versionadded:: 0.20.0

    Returns
    -------
    frame : DataFrame

    Examples
    --------

    >>> from pandas.io.json import json_normalize
    >>> data = [{'id': 1, 'name': {'first': 'Coleen', 'last': 'Volk'}},
    ...         {'name': {'given': 'Mose', 'family': 'Regner'}},
    ...         {'id': 2, 'name': 'Faye Raker'}]
    >>> json_normalize(data)
        id        name name.family name.first name.given name.last
    0  1.0         NaN         NaN     Coleen        NaN      Volk
    1  NaN         NaN      Regner        NaN       Mose       NaN
    2  2.0  Faye Raker         NaN        NaN        NaN       NaN

    >>> data = [{'state': 'Florida',
    ...          'shortname': 'FL',
    ...          'info': {
    ...               'governor': 'Rick Scott'
    ...          },
    ...          'counties': [{'name': 'Dade', 'population': 12345},
    ...                       {'name': 'Broward', 'population': 40000},
    ...                       {'name': 'Palm Beach', 'population': 60000}]},
    ...         {'state': 'Ohio',
    ...          'shortname': 'OH',
    ...          'info': {
    ...               'governor': 'John Kasich'
    ...          },
    ...          'counties': [{'name': 'Summit', 'population': 1234},
    ...                       {'name': 'Cuyahoga', 'population': 1337}]}]
    >>> result = json_normalize(data, 'counties', ['state', 'shortname',
    ...                                            ['info', 'governor']])
    >>> result
             name  population info.governor    state shortname
    0        Dade       12345    Rick Scott  Florida        FL
    1     Broward       40000    Rick Scott  Florida        FL
    2  Palm Beach       60000    Rick Scott  Florida        FL
    3      Summit        1234   John Kasich     Ohio        OH
    4    Cuyahoga        1337   John Kasich     Ohio        OH

    >>> data = {'A': [1, 2]}
    >>> json_normalize(data, 'A', record_prefix='Prefix.')
        Prefix.0
    0          1
    1          2
    """
    def _pull_field(js, spec):
        result = js
        if isinstance(spec, list):
            for field in spec:
                result = result[field]
        else:
            result = result[spec]

        return result

    if isinstance(data, list) and not data:
        return DataFrame()

    # A bit of a hackjob
    if isinstance(data, dict):
        data = [data]

    if record_path is None:
        if any(isinstance(x, dict)
               for y in data for x in compat.itervalues(y)):
            # naive normalization, this is idempotent for flat records
            # and potentially will inflate the data considerably for
            # deeply nested structures:
            #  {VeryLong: {b: 1, c: 2}} -> {VeryLong.b: 1, VeryLong.c: 2}
            #
            # TODO: handle record value which are lists, at least error
            #       reasonably
            data = nested_to_record(data, sep=sep)
        return DataFrame(data)
    elif not isinstance(record_path, list):
        record_path = [record_path]

    if meta is None:
        meta = []
    elif not isinstance(meta, list):
        meta = [meta]

    meta = [m if isinstance(m, list) else [m] for m in meta]

    # Disastrously inefficient for now
    records = []
    lengths = []

    meta_vals = defaultdict(list)
    if not isinstance(sep, compat.string_types):
        sep = str(sep)
    meta_keys = [sep.join(val) for val in meta]

    def _recursive_extract(data, path, seen_meta, level=0):
        if len(path) > 1:
            for obj in data:
                for val, key in zip(meta, meta_keys):
                    if level + 1 == len(val):
                        seen_meta[key] = _pull_field(obj, val[-1])

                _recursive_extract(obj[path[0]], path[1:],
                                   seen_meta, level=level + 1)
        else:
            for obj in data:
                recs = _pull_field(obj, path[0])

                # For repeating the metadata later
                lengths.append(len(recs))

                for val, key in zip(meta, meta_keys):
                    if level + 1 > len(val):
                        meta_val = seen_meta[key]
                    else:
                        try:
                            meta_val = _pull_field(obj, val[level:])
                        except KeyError as e:
                            if errors == 'ignore':
                                meta_val = np.nan
                            else:
                                raise KeyError("Try running with "
                                               "errors='ignore' as key "
                                               "{err} is not always present"
                                               .format(err=e))
                    meta_vals[key].append(meta_val)

                records.extend(recs)

    _recursive_extract(data, record_path, {}, level=0)

    result = DataFrame(records)

    if record_prefix is not None:
        result = result.rename(
            columns=lambda x: "{p}{c}".format(p=record_prefix, c=x))

    # Data types, a problem
    for k, v in compat.iteritems(meta_vals):
        if meta_prefix is not None:
            k = meta_prefix + k

        if k in result:
            raise ValueError('Conflicting metadata name {name}, '
                             'need distinguishing prefix '.format(name=k))

        result[k] = np.array(v).repeat(lengths)

    return result
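# Illustrative sketch (not part of this module): how record_path, meta and
# errors interact in json_normalize above. The sample data is made up; with
# errors='ignore' a missing 'state' key becomes NaN instead of raising a
# KeyError.
from pandas.io.json import json_normalize

_sample = [
    {'state': 'Florida',
     'counties': [{'name': 'Dade', 'population': 12345}]},
    # no 'state' key here -> NaN in the meta column
    {'counties': [{'name': 'Summit', 'population': 1234}]},
]
_frame = json_normalize(_sample, record_path='counties',
                        meta='state', errors='ignore')
print(_frame)  # columns: name, population, state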
# -*- coding: utf-8 -*- import numpy as np import pandas as pd import pandas.util.testing as tm import pytest from pandas import Int64Index, MultiIndex, PeriodIndex, UInt64Index, isna from pandas._libs.tslib import iNaT from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin def test_fillna(idx): # GH 11343 # TODO: Remove or Refactor. Not Implemented for MultiIndex for name, index in [('idx', idx), ]: if len(index) == 0: pass elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with tm.assert_raises_regex(NotImplementedError, msg): idx.fillna(idx[0]) else: idx = index.copy() result = idx.fillna(idx[0]) tm.assert_index_equal(result, idx) assert result is not idx msg = "'value' must be a scalar, passed: " with tm.assert_raises_regex(TypeError, msg): idx.fillna([idx[0]]) idx = index.copy() values = idx.values if isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans def test_dropna(): # GH 6194 idx = pd.MultiIndex.from_arrays([[1, np.nan, 3, np.nan, 5], [1, 2, np.nan, np.nan, 5], ['a', 'b', 'c', np.nan, 'e']]) exp = pd.MultiIndex.from_arrays([[1, 5], [1, 5], ['a', 'e']]) tm.assert_index_equal(idx.dropna(), exp) tm.assert_index_equal(idx.dropna(how='any'), exp) exp = pd.MultiIndex.from_arrays([[1, np.nan, 3, 5], [1, 2, np.nan, 5], ['a', 'b', 'c', 'e']]) tm.assert_index_equal(idx.dropna(how='all'), exp) msg = "invalid how option: xxx" with tm.assert_raises_regex(ValueError, msg): idx.dropna(how='xxx') def test_nulls(idx): # this is really a smoke test for the methods # as these are adequately tested for function elsewhere # TODO: Remove or Refactor. MultiIndex not Implemeted. 
for name, index in [('idx', idx), ]: if len(index) == 0: tm.assert_numpy_array_equal( index.isna(), np.array([], dtype=bool)) elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with tm.assert_raises_regex(NotImplementedError, msg): idx.isna() else: if not index.hasnans: tm.assert_numpy_array_equal( index.isna(), np.zeros(len(index), dtype=bool)) tm.assert_numpy_array_equal( index.notna(), np.ones(len(index), dtype=bool)) else: result = isna(index) tm.assert_numpy_array_equal(index.isna(), result) tm.assert_numpy_array_equal(index.notna(), ~result) @pytest.mark.xfail def test_hasnans_isnans(idx): # GH 11343, added tests for hasnans / isnans index = idx.copy() # cases in indices doesn't include NaN expected = np.array([False] * len(index), dtype=bool) tm.assert_numpy_array_equal(index._isnan, expected) assert not index.hasnans index = idx.copy() values = index.values values[1] = np.nan index = idx.__class__(values) expected = np.array([False] * len(index), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(index._isnan, expected) assert index.hasnans def test_nan_stays_float(): # GH 7031 idx0 = pd.MultiIndex(levels=[["A", "B"], []], labels=[[1, 0], [-1, -1]], names=[0, 1]) idx1 = pd.MultiIndex(levels=[["C"], ["D"]], labels=[[0], [0]], names=[0, 1]) idxm = idx0.join(idx1, how='outer') assert pd.isna(idx0.get_level_values(1)).all() # the following failed in 0.14.1 assert pd.isna(idxm.get_level_values(1)[:-1]).all() df0 = pd.DataFrame([[1, 2]], index=idx0) df1 = pd.DataFrame([[3, 4]], index=idx1) dfm = df0 - df1 assert pd.isna(df0.index.get_level_values(1)).all() # the following failed in 0.14.1 assert pd.isna(dfm.index.get_level_values(1)[:-1]).all()
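# Usage sketch (standalone, mirrors test_dropna above): how='any' drops
# tuples containing a NaN at any level, while how='all' only drops tuples
# that are NaN at every level.
import numpy as np
import pandas as pd

_mi = pd.MultiIndex.from_arrays([[1, np.nan, 3],
                                 [1, 2, np.nan]])
print(_mi.dropna(how='any'))  # keeps only the (1, 1) tuple
print(_mi.dropna(how='all'))  # keeps all three tuples; none is all-NaN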
kdebrab/pandas
pandas/tests/indexes/multi/test_missing.py
pandas/io/json/normalize.py
# -*- coding: utf-8 -*- import warnings import pandas as pd import pandas.util.testing as tm from pandas import MultiIndex, compat from pandas.compat import PY3, range, u def test_dtype_str(indices): dtype = indices.dtype_str assert isinstance(dtype, compat.string_types) assert dtype == str(indices.dtype) def test_format(idx): idx.format() idx[:0].format() def test_format_integer_names(): index = MultiIndex(levels=[[0, 1], [0, 1]], labels=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[0, 1]) index.format(names=True) def test_format_sparse_config(idx): warn_filters = warnings.filters warnings.filterwarnings('ignore', category=FutureWarning, module=".*format") # GH1538 pd.set_option('display.multi_sparse', False) result = idx.format() assert result[1] == 'foo two' tm.reset_display_options() warnings.filters = warn_filters def test_format_sparse_display(): index = MultiIndex(levels=[[0, 1], [0, 1], [0, 1], [0]], labels=[[0, 0, 0, 1, 1, 1], [0, 0, 1, 0, 0, 1], [0, 1, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0]]) result = index.format() assert result[3] == '1 0 0 0' def test_repr_with_unicode_data(): with pd.core.config.option_context("display.encoding", 'UTF-8'): d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]} index = pd.DataFrame(d).set_index(["a", "b"]).index assert "\\u" not in repr(index) # we don't want unicode-escaped def test_repr_roundtrip(): mi = MultiIndex.from_product([list('ab'), range(3)], names=['first', 'second']) str(mi) if PY3: tm.assert_index_equal(eval(repr(mi)), mi, exact=True) else: result = eval(repr(mi)) # string coerces to unicode tm.assert_index_equal(result, mi, exact=False) assert mi.get_level_values('first').inferred_type == 'string' assert result.get_level_values('first').inferred_type == 'unicode' mi_u = MultiIndex.from_product( [list(u'ab'), range(3)], names=['first', 'second']) result = eval(repr(mi_u)) tm.assert_index_equal(result, mi_u, exact=True) # formatting if PY3: str(mi) else: compat.text_type(mi) # long format mi = MultiIndex.from_product([list('abcdefg'), range(10)], names=['first', 'second']) if PY3: tm.assert_index_equal(eval(repr(mi)), mi, exact=True) else: result = eval(repr(mi)) # string coerces to unicode tm.assert_index_equal(result, mi, exact=False) assert mi.get_level_values('first').inferred_type == 'string' assert result.get_level_values('first').inferred_type == 'unicode' result = eval(repr(mi_u)) tm.assert_index_equal(result, mi_u, exact=True) def test_str(): # tested elsewhere pass def test_unicode_string_with_unicode(): d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]} idx = pd.DataFrame(d).set_index(["a", "b"]).index if PY3: str(idx) else: compat.text_type(idx) def test_bytestring_with_unicode(): d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]} idx = pd.DataFrame(d).set_index(["a", "b"]).index if PY3: bytes(idx) else: str(idx) def test_repr_max_seq_item_setting(idx): # GH10182 idx = idx.repeat(50) with pd.option_context("display.max_seq_items", None): repr(idx) assert '...' not in str(idx)
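# Sketch (standalone) of the option exercised in test_format_sparse_config
# above: with display.multi_sparse disabled, format() repeats every level
# label on every row instead of blanking consecutive duplicates.
import pandas as pd

_mi = pd.MultiIndex.from_product([['foo', 'bar'], ['one', 'two']])
with pd.option_context('display.multi_sparse', False):
    print(_mi.format())
    # e.g. ['foo  one', 'foo  two', 'bar  one', 'bar  two']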
# -*- coding: utf-8 -*- import numpy as np import pandas as pd import pandas.util.testing as tm import pytest from pandas import Int64Index, MultiIndex, PeriodIndex, UInt64Index, isna from pandas._libs.tslib import iNaT from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin def test_fillna(idx): # GH 11343 # TODO: Remove or Refactor. Not Implemented for MultiIndex for name, index in [('idx', idx), ]: if len(index) == 0: pass elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with tm.assert_raises_regex(NotImplementedError, msg): idx.fillna(idx[0]) else: idx = index.copy() result = idx.fillna(idx[0]) tm.assert_index_equal(result, idx) assert result is not idx msg = "'value' must be a scalar, passed: " with tm.assert_raises_regex(TypeError, msg): idx.fillna([idx[0]]) idx = index.copy() values = idx.values if isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans def test_dropna(): # GH 6194 idx = pd.MultiIndex.from_arrays([[1, np.nan, 3, np.nan, 5], [1, 2, np.nan, np.nan, 5], ['a', 'b', 'c', np.nan, 'e']]) exp = pd.MultiIndex.from_arrays([[1, 5], [1, 5], ['a', 'e']]) tm.assert_index_equal(idx.dropna(), exp) tm.assert_index_equal(idx.dropna(how='any'), exp) exp = pd.MultiIndex.from_arrays([[1, np.nan, 3, 5], [1, 2, np.nan, 5], ['a', 'b', 'c', 'e']]) tm.assert_index_equal(idx.dropna(how='all'), exp) msg = "invalid how option: xxx" with tm.assert_raises_regex(ValueError, msg): idx.dropna(how='xxx') def test_nulls(idx): # this is really a smoke test for the methods # as these are adequately tested for function elsewhere # TODO: Remove or Refactor. MultiIndex not Implemeted. 
for name, index in [('idx', idx), ]: if len(index) == 0: tm.assert_numpy_array_equal( index.isna(), np.array([], dtype=bool)) elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with tm.assert_raises_regex(NotImplementedError, msg): idx.isna() else: if not index.hasnans: tm.assert_numpy_array_equal( index.isna(), np.zeros(len(index), dtype=bool)) tm.assert_numpy_array_equal( index.notna(), np.ones(len(index), dtype=bool)) else: result = isna(index) tm.assert_numpy_array_equal(index.isna(), result) tm.assert_numpy_array_equal(index.notna(), ~result) @pytest.mark.xfail def test_hasnans_isnans(idx): # GH 11343, added tests for hasnans / isnans index = idx.copy() # cases in indices doesn't include NaN expected = np.array([False] * len(index), dtype=bool) tm.assert_numpy_array_equal(index._isnan, expected) assert not index.hasnans index = idx.copy() values = index.values values[1] = np.nan index = idx.__class__(values) expected = np.array([False] * len(index), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(index._isnan, expected) assert index.hasnans def test_nan_stays_float(): # GH 7031 idx0 = pd.MultiIndex(levels=[["A", "B"], []], labels=[[1, 0], [-1, -1]], names=[0, 1]) idx1 = pd.MultiIndex(levels=[["C"], ["D"]], labels=[[0], [0]], names=[0, 1]) idxm = idx0.join(idx1, how='outer') assert pd.isna(idx0.get_level_values(1)).all() # the following failed in 0.14.1 assert pd.isna(idxm.get_level_values(1)[:-1]).all() df0 = pd.DataFrame([[1, 2]], index=idx0) df1 = pd.DataFrame([[3, 4]], index=idx1) dfm = df0 - df1 assert pd.isna(df0.index.get_level_values(1)).all() # the following failed in 0.14.1 assert pd.isna(dfm.index.get_level_values(1)[:-1]).all()
kdebrab/pandas
pandas/tests/indexes/multi/test_missing.py
pandas/tests/indexes/multi/test_format.py
"""Interfaces with TotalConnect sensors.""" import logging from homeassistant.components.binary_sensor import ( DEVICE_CLASS_DOOR, DEVICE_CLASS_GAS, DEVICE_CLASS_SMOKE, BinarySensorEntity, ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities) -> None: """Set up TotalConnect device sensors based on a config entry.""" sensors = [] client_locations = hass.data[DOMAIN][entry.entry_id].locations for location_id, location in client_locations.items(): for zone_id, zone in location.zones.items(): sensors.append(TotalConnectBinarySensor(zone_id, location_id, zone)) async_add_entities(sensors, True) class TotalConnectBinarySensor(BinarySensorEntity): """Represent an TotalConnect zone.""" def __init__(self, zone_id, location_id, zone): """Initialize the TotalConnect status.""" self._zone_id = zone_id self._location_id = location_id self._zone = zone self._name = self._zone.description self._unique_id = f"{location_id} {zone_id}" self._is_on = None self._is_tampered = None self._is_low_battery = None @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def name(self): """Return the name of the device.""" return self._name def update(self): """Return the state of the device.""" self._is_tampered = self._zone.is_tampered() self._is_low_battery = self._zone.is_low_battery() if self._zone.is_faulted() or self._zone.is_triggered(): self._is_on = True else: self._is_on = False @property def is_on(self): """Return true if the binary sensor is on.""" return self._is_on @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" if self._zone.is_type_security(): return DEVICE_CLASS_DOOR if self._zone.is_type_fire(): return DEVICE_CLASS_SMOKE if self._zone.is_type_carbon_monoxide(): return DEVICE_CLASS_GAS return None @property def device_state_attributes(self): """Return the state attributes.""" attributes = { "zone_id": self._zone_id, "location_id": self._location_id, "low_battery": self._is_low_battery, "tampered": self._is_tampered, } return attributes
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
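# Sketch of the filter schema the tests above exercise: this dict mirrors
# the shape built in test_full_config and _setup and would be passed to
# async_setup_component(hass, "prometheus", ...). Values are examples only.
EXAMPLE_FILTER_CONFIG = {
    "prometheus": {
        "filter": {
            "include_domains": ["climate"],
            "include_entity_globs": ["sensor.energy_*"],
            "exclude_entities": ["climate.garage"],
        }
    }
}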
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/totalconnect/binary_sensor.py
"""Device tracker for BMW Connected Drive vehicles.""" import logging from homeassistant.util import slugify from . import DOMAIN as BMW_DOMAIN _LOGGER = logging.getLogger(__name__) def setup_scanner(hass, config, see, discovery_info=None): """Set up the BMW tracker.""" accounts = hass.data[BMW_DOMAIN] _LOGGER.debug("Found BMW accounts: %s", ", ".join([a.name for a in accounts])) for account in accounts: for vehicle in account.account.vehicles: tracker = BMWDeviceTracker(see, vehicle) account.add_update_listener(tracker.update) tracker.update() return True class BMWDeviceTracker: """BMW Connected Drive device tracker.""" def __init__(self, see, vehicle): """Initialize the Tracker.""" self._see = see self.vehicle = vehicle def update(self) -> None: """Update the device info. Only update the state in Home Assistant if tracking in the car is enabled. """ dev_id = slugify(self.vehicle.name) if not self.vehicle.state.is_vehicle_tracking_enabled: _LOGGER.debug("Tracking is disabled for vehicle %s", dev_id) return _LOGGER.debug("Updating %s", dev_id) attrs = {"vin": self.vehicle.vin} self._see( dev_id=dev_id, host_name=self.vehicle.name, gps=self.vehicle.state.gps_position, attributes=attrs, icon="mdi:car", )
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/bmw_connected_drive/device_tracker.py
"""Support for HomematicIP Cloud binary sensor.""" import logging from typing import Any, Dict from homematicip.aio.device import ( AsyncAccelerationSensor, AsyncContactInterface, AsyncDevice, AsyncFullFlushContactInterface, AsyncMotionDetectorIndoor, AsyncMotionDetectorOutdoor, AsyncMotionDetectorPushButton, AsyncPluggableMainsFailureSurveillance, AsyncPresenceDetectorIndoor, AsyncRotaryHandleSensor, AsyncShutterContact, AsyncShutterContactMagnetic, AsyncSmokeDetector, AsyncTiltVibrationSensor, AsyncWaterSensor, AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro, ) from homematicip.aio.group import AsyncSecurityGroup, AsyncSecurityZoneGroup from homematicip.base.enums import SmokeDetectorAlarmType, WindowState from homeassistant.components.binary_sensor import ( DEVICE_CLASS_BATTERY, DEVICE_CLASS_DOOR, DEVICE_CLASS_LIGHT, DEVICE_CLASS_MOISTURE, DEVICE_CLASS_MOTION, DEVICE_CLASS_MOVING, DEVICE_CLASS_OPENING, DEVICE_CLASS_POWER, DEVICE_CLASS_PRESENCE, DEVICE_CLASS_SAFETY, DEVICE_CLASS_SMOKE, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity from .hap import HomematicipHAP _LOGGER = logging.getLogger(__name__) ATTR_ACCELERATION_SENSOR_MODE = "acceleration_sensor_mode" ATTR_ACCELERATION_SENSOR_NEUTRAL_POSITION = "acceleration_sensor_neutral_position" ATTR_ACCELERATION_SENSOR_SENSITIVITY = "acceleration_sensor_sensitivity" ATTR_ACCELERATION_SENSOR_TRIGGER_ANGLE = "acceleration_sensor_trigger_angle" ATTR_INTRUSION_ALARM = "intrusion_alarm" ATTR_MOISTURE_DETECTED = "moisture_detected" ATTR_MOTION_DETECTED = "motion_detected" ATTR_POWER_MAINS_FAILURE = "power_mains_failure" ATTR_PRESENCE_DETECTED = "presence_detected" ATTR_SMOKE_DETECTOR_ALARM = "smoke_detector_alarm" ATTR_TODAY_SUNSHINE_DURATION = "today_sunshine_duration_in_minutes" ATTR_WATER_LEVEL_DETECTED = "water_level_detected" ATTR_WINDOW_STATE = "window_state" GROUP_ATTRIBUTES = { "moistureDetected": ATTR_MOISTURE_DETECTED, "motionDetected": ATTR_MOTION_DETECTED, "powerMainsFailure": ATTR_POWER_MAINS_FAILURE, "presenceDetected": ATTR_PRESENCE_DETECTED, "waterlevelDetected": ATTR_WATER_LEVEL_DETECTED, } SAM_DEVICE_ATTRIBUTES = { "accelerationSensorNeutralPosition": ATTR_ACCELERATION_SENSOR_NEUTRAL_POSITION, "accelerationSensorMode": ATTR_ACCELERATION_SENSOR_MODE, "accelerationSensorSensitivity": ATTR_ACCELERATION_SENSOR_SENSITIVITY, "accelerationSensorTriggerAngle": ATTR_ACCELERATION_SENSOR_TRIGGER_ANGLE, } async def async_setup_entry( hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities ) -> None: """Set up the HomematicIP Cloud binary sensor from a config entry.""" hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id] entities = [] for device in hap.home.devices: if isinstance(device, AsyncAccelerationSensor): entities.append(HomematicipAccelerationSensor(hap, device)) if isinstance(device, AsyncTiltVibrationSensor): entities.append(HomematicipTiltVibrationSensor(hap, device)) if isinstance(device, (AsyncContactInterface, AsyncFullFlushContactInterface)): entities.append(HomematicipContactInterface(hap, device)) if isinstance( device, (AsyncShutterContact, AsyncShutterContactMagnetic), ): entities.append(HomematicipShutterContact(hap, device)) if isinstance(device, AsyncRotaryHandleSensor): entities.append(HomematicipShutterContact(hap, device, True)) if isinstance( device, ( AsyncMotionDetectorIndoor, AsyncMotionDetectorOutdoor, AsyncMotionDetectorPushButton, 
), ): entities.append(HomematicipMotionDetector(hap, device)) if isinstance(device, AsyncPluggableMainsFailureSurveillance): entities.append( HomematicipPluggableMainsFailureSurveillanceSensor(hap, device) ) if isinstance(device, AsyncPresenceDetectorIndoor): entities.append(HomematicipPresenceDetector(hap, device)) if isinstance(device, AsyncSmokeDetector): entities.append(HomematicipSmokeDetector(hap, device)) if isinstance(device, AsyncWaterSensor): entities.append(HomematicipWaterDetector(hap, device)) if isinstance(device, (AsyncWeatherSensorPlus, AsyncWeatherSensorPro)): entities.append(HomematicipRainSensor(hap, device)) if isinstance( device, (AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro) ): entities.append(HomematicipStormSensor(hap, device)) entities.append(HomematicipSunshineSensor(hap, device)) if isinstance(device, AsyncDevice) and device.lowBat is not None: entities.append(HomematicipBatterySensor(hap, device)) for group in hap.home.groups: if isinstance(group, AsyncSecurityGroup): entities.append(HomematicipSecuritySensorGroup(hap, group)) elif isinstance(group, AsyncSecurityZoneGroup): entities.append(HomematicipSecurityZoneSensorGroup(hap, group)) if entities: async_add_entities(entities) class HomematicipBaseActionSensor(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP base action sensor.""" @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_MOVING @property def is_on(self) -> bool: """Return true if acceleration is detected.""" return self._device.accelerationSensorTriggered @property def device_state_attributes(self) -> Dict[str, Any]: """Return the state attributes of the acceleration sensor.""" state_attr = super().device_state_attributes for attr, attr_key in SAM_DEVICE_ATTRIBUTES.items(): attr_value = getattr(self._device, attr, None) if attr_value: state_attr[attr_key] = attr_value return state_attr class HomematicipAccelerationSensor(HomematicipBaseActionSensor): """Representation of the HomematicIP acceleration sensor.""" class HomematicipTiltVibrationSensor(HomematicipBaseActionSensor): """Representation of the HomematicIP tilt vibration sensor.""" class HomematicipContactInterface(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP contact interface.""" @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_OPENING @property def is_on(self) -> bool: """Return true if the contact interface is on/open.""" if self._device.windowState is None: return None return self._device.windowState != WindowState.CLOSED class HomematicipShutterContact(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP shutter contact.""" def __init__( self, hap: HomematicipHAP, device, has_additional_state: bool = False ) -> None: """Initialize the shutter contact.""" super().__init__(hap, device) self.has_additional_state = has_additional_state @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_DOOR @property def is_on(self) -> bool: """Return true if the shutter contact is on/open.""" if self._device.windowState is None: return None return self._device.windowState != WindowState.CLOSED @property def device_state_attributes(self) -> Dict[str, Any]: """Return the state attributes of the Shutter Contact.""" state_attr = super().device_state_attributes if self.has_additional_state: window_state = getattr(self._device, "windowState", 
None) if window_state and window_state != WindowState.CLOSED: state_attr[ATTR_WINDOW_STATE] = window_state return state_attr class HomematicipMotionDetector(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP motion detector.""" @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_MOTION @property def is_on(self) -> bool: """Return true if motion is detected.""" return self._device.motionDetected class HomematicipPresenceDetector(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP presence detector.""" @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_PRESENCE @property def is_on(self) -> bool: """Return true if presence is detected.""" return self._device.presenceDetected class HomematicipSmokeDetector(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP smoke detector.""" @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_SMOKE @property def is_on(self) -> bool: """Return true if smoke is detected.""" if self._device.smokeDetectorAlarmType: return ( self._device.smokeDetectorAlarmType == SmokeDetectorAlarmType.PRIMARY_ALARM ) return False class HomematicipWaterDetector(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP water detector.""" @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_MOISTURE @property def is_on(self) -> bool: """Return true, if moisture or waterlevel is detected.""" return self._device.moistureDetected or self._device.waterlevelDetected class HomematicipStormSensor(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP storm sensor.""" def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize storm sensor.""" super().__init__(hap, device, "Storm") @property def icon(self) -> str: """Return the icon.""" return "mdi:weather-windy" if self.is_on else "mdi:pinwheel-outline" @property def is_on(self) -> bool: """Return true, if storm is detected.""" return self._device.storm class HomematicipRainSensor(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP rain sensor.""" def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize rain sensor.""" super().__init__(hap, device, "Raining") @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_MOISTURE @property def is_on(self) -> bool: """Return true, if it is raining.""" return self._device.raining class HomematicipSunshineSensor(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP sunshine sensor.""" def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize sunshine sensor.""" super().__init__(hap, device, "Sunshine") @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_LIGHT @property def is_on(self) -> bool: """Return true if sun is shining.""" return self._device.sunshine @property def device_state_attributes(self) -> Dict[str, Any]: """Return the state attributes of the illuminance sensor.""" state_attr = super().device_state_attributes today_sunshine_duration = getattr(self._device, "todaySunshineDuration", None) if today_sunshine_duration: state_attr[ATTR_TODAY_SUNSHINE_DURATION] = today_sunshine_duration return state_attr class HomematicipBatterySensor(HomematicipGenericEntity, 
BinarySensorEntity): """Representation of the HomematicIP low battery sensor.""" def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize battery sensor.""" super().__init__(hap, device, "Battery") @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_BATTERY @property def is_on(self) -> bool: """Return true if battery is low.""" return self._device.lowBat class HomematicipPluggableMainsFailureSurveillanceSensor( HomematicipGenericEntity, BinarySensorEntity ): """Representation of the HomematicIP pluggable mains failure surveillance sensor.""" def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize pluggable mains failure surveillance sensor.""" super().__init__(hap, device) @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_POWER @property def is_on(self) -> bool: """Return true if power mains fails.""" return not self._device.powerMainsFailure class HomematicipSecurityZoneSensorGroup(HomematicipGenericEntity, BinarySensorEntity): """Representation of the HomematicIP security zone sensor group.""" def __init__(self, hap: HomematicipHAP, device, post: str = "SecurityZone") -> None: """Initialize security zone group.""" device.modelType = f"HmIP-{post}" super().__init__(hap, device, post) @property def device_class(self) -> str: """Return the class of this sensor.""" return DEVICE_CLASS_SAFETY @property def available(self) -> bool: """Security-Group available.""" # A security-group must be available, and should not be affected by # the individual availability of group members. return True @property def device_state_attributes(self) -> Dict[str, Any]: """Return the state attributes of the security zone group.""" state_attr = super().device_state_attributes for attr, attr_key in GROUP_ATTRIBUTES.items(): attr_value = getattr(self._device, attr, None) if attr_value: state_attr[attr_key] = attr_value window_state = getattr(self._device, "windowState", None) if window_state and window_state != WindowState.CLOSED: state_attr[ATTR_WINDOW_STATE] = str(window_state) return state_attr @property def is_on(self) -> bool: """Return true if security issue detected.""" if ( self._device.motionDetected or self._device.presenceDetected or self._device.unreach or self._device.sabotage ): return True if ( self._device.windowState is not None and self._device.windowState != WindowState.CLOSED ): return True return False class HomematicipSecuritySensorGroup( HomematicipSecurityZoneSensorGroup, BinarySensorEntity ): """Representation of the HomematicIP security group.""" def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize security group.""" super().__init__(hap, device, "Sensors") @property def device_state_attributes(self) -> Dict[str, Any]: """Return the state attributes of the security group.""" state_attr = super().device_state_attributes smoke_detector_at = getattr(self._device, "smokeDetectorAlarmType", None) if smoke_detector_at: if smoke_detector_at == SmokeDetectorAlarmType.PRIMARY_ALARM: state_attr[ATTR_SMOKE_DETECTOR_ALARM] = str(smoke_detector_at) if smoke_detector_at == SmokeDetectorAlarmType.INTRUSION_ALARM: state_attr[ATTR_INTRUSION_ALARM] = str(smoke_detector_at) return state_attr @property def is_on(self) -> bool: """Return true if safety issue detected.""" parent_is_on = super().is_on if parent_is_on: return True if ( self._device.powerMainsFailure or self._device.moistureDetected or self._device.waterlevelDetected or self._device.lowBat or 
self._device.dutyCycle ): return True if ( self._device.smokeDetectorAlarmType is not None and self._device.smokeDetectorAlarmType != SmokeDetectorAlarmType.IDLE_OFF ): return True return False
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/homematicip_cloud/binary_sensor.py
"""Support for OVO Energy.""" from datetime import datetime, timedelta import logging from typing import Any, Dict import aiohttp import async_timeout from ovoenergy import OVODailyUsage from ovoenergy.ovoenergy import OVOEnergy from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.typing import ConfigType, HomeAssistantType from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, ) from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Set up the OVO Energy components.""" return True async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up OVO Energy from a config entry.""" client = OVOEnergy() try: await client.authenticate(entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD]) except aiohttp.ClientError as exception: _LOGGER.warning(exception) raise ConfigEntryNotReady from exception async def async_update_data() -> OVODailyUsage: """Fetch data from OVO Energy.""" now = datetime.utcnow() async with async_timeout.timeout(10): try: await client.authenticate( entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD] ) return await client.get_daily_usage(now.strftime("%Y-%m")) except aiohttp.ClientError as exception: _LOGGER.warning(exception) return None coordinator = DataUpdateCoordinator( hass, _LOGGER, # Name of the data. For logging purposes. name="sensor", update_method=async_update_data, # Polling interval. Will only be polled if there are subscribers. update_interval=timedelta(seconds=300), ) hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = { DATA_CLIENT: client, DATA_COORDINATOR: coordinator, } # Fetch initial data so we have data when entities subscribe await coordinator.async_refresh() # Setup components hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") ) return True async def async_unload_entry(hass: HomeAssistantType, entry: ConfigType) -> bool: """Unload OVO Energy config entry.""" # Unload sensors await hass.config_entries.async_forward_entry_unload(entry, "sensor") del hass.data[DOMAIN][entry.entry_id] return True class OVOEnergyEntity(CoordinatorEntity): """Defines a base OVO Energy entity.""" def __init__( self, coordinator: DataUpdateCoordinator, client: OVOEnergy, key: str, name: str, icon: str, ) -> None: """Initialize the OVO Energy entity.""" super().__init__(coordinator) self._client = client self._key = key self._name = name self._icon = icon self._available = True @property def unique_id(self) -> str: """Return the unique ID for this sensor.""" return self._key @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def icon(self) -> str: """Return the mdi icon of the entity.""" return self._icon @property def available(self) -> bool: """Return True if entity is available.""" return self.coordinator.last_update_success and self._available class OVOEnergyDeviceEntity(OVOEnergyEntity): """Defines a OVO Energy device entity.""" @property def device_info(self) -> Dict[str, Any]: """Return device information about this OVO Energy instance.""" return { "identifiers": {(DOMAIN, self._client.account_id)}, "manufacturer": "OVO Energy", "name": self._client.account_id, "entry_type": "service", }
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/ovo_energy/__init__.py
"""Support for turning on and off Pi-hole system.""" import logging from hole.exceptions import HoleError import voluptuous as vol from homeassistant.components.switch import SwitchEntity from homeassistant.const import CONF_NAME from homeassistant.helpers import config_validation as cv, entity_platform from . import PiHoleEntity from .const import ( DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN as PIHOLE_DOMAIN, SERVICE_DISABLE, SERVICE_DISABLE_ATTR_DURATION, ) _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up the Pi-hole switch.""" name = entry.data[CONF_NAME] hole_data = hass.data[PIHOLE_DOMAIN][entry.entry_id] switches = [ PiHoleSwitch( hole_data[DATA_KEY_API], hole_data[DATA_KEY_COORDINATOR], name, entry.entry_id, ) ] async_add_entities(switches, True) # register service platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_DISABLE, { vol.Required(SERVICE_DISABLE_ATTR_DURATION): vol.All( cv.time_period_str, cv.positive_timedelta ), }, "async_disable", ) class PiHoleSwitch(PiHoleEntity, SwitchEntity): """Representation of a Pi-hole switch.""" @property def name(self): """Return the name of the switch.""" return self._name @property def unique_id(self): """Return the unique id of the switch.""" return f"{self._server_unique_id}/Switch" @property def icon(self): """Icon to use in the frontend, if any.""" return "mdi:pi-hole" @property def is_on(self): """Return if the service is on.""" return self.api.data.get("status") == "enabled" async def async_turn_on(self, **kwargs): """Turn on the service.""" try: await self.api.enable() await self.async_update() except HoleError as err: _LOGGER.error("Unable to enable Pi-hole: %s", err) async def async_turn_off(self, **kwargs): """Turn off the service.""" await self.async_disable() async def async_disable(self, duration=None): """Disable the service for a given duration.""" duration_seconds = True # Disable infinitely by default if duration is not None: duration_seconds = duration.total_seconds() _LOGGER.debug( "Disabling Pi-hole '%s' (%s) for %d seconds", self.name, self.api.host, duration_seconds, ) try: await self.api.disable(duration_seconds) await self.async_update() except HoleError as err: _LOGGER.error("Unable to disable Pi-hole: %s", err)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/pi_hole/switch.py
"""Support for Rova garbage calendar.""" from datetime import datetime, timedelta import logging from requests.exceptions import ConnectTimeout, HTTPError from rova.rova import Rova import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_MONITORED_CONDITIONS, CONF_NAME, DEVICE_CLASS_TIMESTAMP, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle # Config for rova requests. CONF_ZIP_CODE = "zip_code" CONF_HOUSE_NUMBER = "house_number" CONF_HOUSE_NUMBER_SUFFIX = "house_number_suffix" UPDATE_DELAY = timedelta(hours=12) SCAN_INTERVAL = timedelta(hours=12) # Supported sensor types: # Key: [json_key, name, icon] SENSOR_TYPES = { "bio": ["gft", "Biowaste", "mdi:recycle"], "paper": ["papier", "Paper", "mdi:recycle"], "plastic": ["pmd", "PET", "mdi:recycle"], "residual": ["restafval", "Residual", "mdi:recycle"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_ZIP_CODE): cv.string, vol.Required(CONF_HOUSE_NUMBER): cv.string, vol.Optional(CONF_HOUSE_NUMBER_SUFFIX, default=""): cv.string, vol.Optional(CONF_NAME, default="Rova"): cv.string, vol.Optional(CONF_MONITORED_CONDITIONS, default=["bio"]): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), } ) _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Create the Rova data service and sensors.""" zip_code = config[CONF_ZIP_CODE] house_number = config[CONF_HOUSE_NUMBER] house_number_suffix = config[CONF_HOUSE_NUMBER_SUFFIX] platform_name = config[CONF_NAME] # Create new Rova object to retrieve data api = Rova(zip_code, house_number, house_number_suffix) try: if not api.is_rova_area(): _LOGGER.error("ROVA does not collect garbage in this area") return except (ConnectTimeout, HTTPError): _LOGGER.error("Could not retrieve details from ROVA API") return # Create rova data service which will retrieve and update the data. data_service = RovaData(api) # Create a new sensor for each garbage type. 
entities = [] for sensor_key in config[CONF_MONITORED_CONDITIONS]: sensor = RovaSensor(platform_name, sensor_key, data_service) entities.append(sensor) add_entities(entities, True) class RovaSensor(Entity): """Representation of a Rova sensor.""" def __init__(self, platform_name, sensor_key, data_service): """Initialize the sensor.""" self.sensor_key = sensor_key self.platform_name = platform_name self.data_service = data_service self._state = None self._json_key = SENSOR_TYPES[self.sensor_key][0] @property def name(self): """Return the name.""" return f"{self.platform_name}_{self.sensor_key}" @property def icon(self): """Return the sensor icon.""" return SENSOR_TYPES[self.sensor_key][2] @property def device_class(self): """Return the class of this sensor.""" return DEVICE_CLASS_TIMESTAMP @property def state(self): """Return the state of the sensor.""" return self._state def update(self): """Get the latest data from the sensor and update the state.""" self.data_service.update() pickup_date = self.data_service.data.get(self._json_key) if pickup_date is not None: self._state = pickup_date.isoformat() class RovaData: """Get and update the latest data from the Rova API.""" def __init__(self, api): """Initialize the data object.""" self.api = api self.data = {} @Throttle(UPDATE_DELAY) def update(self): """Update the data from the Rova API.""" try: items = self.api.get_calendar_items() except (ConnectTimeout, HTTPError): _LOGGER.error("Could not retrieve data, retry again later") return self.data = {} for item in items: date = datetime.strptime(item["Date"], "%Y-%m-%dT%H:%M:%S") code = item["GarbageTypeCode"].lower() if code not in self.data and date > datetime.now(): self.data[code] = date _LOGGER.debug("Updated Rova calendar: %s", self.data)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/rova/sensor.py
"""Support for Recollect Waste curbside collection pickup.""" from datetime import timedelta import logging import recollect_waste import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) ATTR_PICKUP_TYPES = "pickup_types" ATTR_AREA_NAME = "area_name" CONF_PLACE_ID = "place_id" CONF_SERVICE_ID = "service_id" DEFAULT_NAME = "recollect_waste" ICON = "mdi:trash-can-outline" SCAN_INTERVAL = timedelta(days=1) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PLACE_ID): cv.string, vol.Required(CONF_SERVICE_ID): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Recollect Waste platform.""" client = recollect_waste.RecollectWasteClient( config[CONF_PLACE_ID], config[CONF_SERVICE_ID] ) # Ensure the client can connect to the API successfully # with given place_id and service_id. try: client.get_next_pickup() except recollect_waste.RecollectWasteException as ex: _LOGGER.error("Recollect Waste platform error. %s", ex) return add_entities([RecollectWasteSensor(config.get(CONF_NAME), client)], True) class RecollectWasteSensor(Entity): """Recollect Waste Sensor.""" def __init__(self, name, client): """Initialize the sensor.""" self._attributes = {} self._name = name self._state = None self.client = client @property def name(self): """Return the name of the sensor.""" return self._name @property def unique_id(self) -> str: """Return a unique ID.""" return f"{self.client.place_id}{self.client.service_id}" @property def state(self): """Return the state of the sensor.""" return self._state @property def device_state_attributes(self): """Return the state attributes.""" return self._attributes @property def icon(self): """Icon to use in the frontend.""" return ICON def update(self): """Update device state.""" try: pickup_event = self.client.get_next_pickup() self._state = pickup_event.event_date self._attributes.update( { ATTR_PICKUP_TYPES: pickup_event.pickup_types, ATTR_AREA_NAME: pickup_event.area_name, } ) except recollect_waste.RecollectWasteException as ex: _LOGGER.error("Recollect Waste platform error. %s", ex)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/recollect_waste/sensor.py
"""Constants for the Rollease Acmeda Automate integration.""" import logging LOGGER = logging.getLogger(__package__) DOMAIN = "acmeda" ACMEDA_HUB_UPDATE = "acmeda_hub_update_{}" ACMEDA_ENTITY_REMOVE = "acmeda_entity_remove_{}"
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
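test_filtered_denylist above pins down a precedence rule: an entity named in include_entities passes even when a domain or glob exclusion matches it. Below is a self-contained sketch of that rule with deliberately simplified semantics; it is not the real homeassistant.helpers.entityfilter implementation, and entity_passes is a name introduced here.

from fnmatch import fnmatch


def entity_passes(
    entity_id,
    include_entities=(),
    exclude_domains=(),
    exclude_entity_globs=(),
    exclude_entities=(),
):
    # Explicit inclusion wins over every exclusion rule.
    if entity_id in include_entities:
        return True
    domain = entity_id.split(".")[0]
    if domain in exclude_domains or entity_id in exclude_entities:
        return False
    return not any(fnmatch(entity_id, glob) for glob in exclude_entity_globs)


# Mirrors two of the expectations asserted in test_filtered_denylist above.
assert entity_passes(
    "test.excluded_test",
    include_entities=("test.excluded_test",),
    exclude_entity_globs=("*.excluded_*",),
)
assert not entity_passes(
    "alt_fake.excluded_test", exclude_entity_globs=("*.excluded_*",)
)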
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/acmeda/const.py
"""Allows the creation of a sensor that filters state property.""" from collections import Counter, deque from copy import copy from datetime import timedelta from functools import partial import logging from numbers import Number import statistics from typing import Optional import voluptuous as vol from homeassistant.components import history from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, CONF_ENTITY_ID, CONF_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.util.decorator import Registry import homeassistant.util.dt as dt_util from . import DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) FILTER_NAME_RANGE = "range" FILTER_NAME_LOWPASS = "lowpass" FILTER_NAME_OUTLIER = "outlier" FILTER_NAME_THROTTLE = "throttle" FILTER_NAME_TIME_THROTTLE = "time_throttle" FILTER_NAME_TIME_SMA = "time_simple_moving_average" FILTERS = Registry() CONF_FILTERS = "filters" CONF_FILTER_NAME = "filter" CONF_FILTER_WINDOW_SIZE = "window_size" CONF_FILTER_PRECISION = "precision" CONF_FILTER_RADIUS = "radius" CONF_FILTER_TIME_CONSTANT = "time_constant" CONF_FILTER_LOWER_BOUND = "lower_bound" CONF_FILTER_UPPER_BOUND = "upper_bound" CONF_TIME_SMA_TYPE = "type" TIME_SMA_LAST = "last" WINDOW_SIZE_UNIT_NUMBER_EVENTS = 1 WINDOW_SIZE_UNIT_TIME = 2 DEFAULT_WINDOW_SIZE = 1 DEFAULT_PRECISION = 2 DEFAULT_FILTER_RADIUS = 2.0 DEFAULT_FILTER_TIME_CONSTANT = 10 NAME_TEMPLATE = "{} filter" ICON = "mdi:chart-line-variant" FILTER_SCHEMA = vol.Schema( {vol.Optional(CONF_FILTER_PRECISION, default=DEFAULT_PRECISION): vol.Coerce(int)} ) FILTER_OUTLIER_SCHEMA = FILTER_SCHEMA.extend( { vol.Required(CONF_FILTER_NAME): FILTER_NAME_OUTLIER, vol.Optional(CONF_FILTER_WINDOW_SIZE, default=DEFAULT_WINDOW_SIZE): vol.Coerce( int ), vol.Optional(CONF_FILTER_RADIUS, default=DEFAULT_FILTER_RADIUS): vol.Coerce( float ), } ) FILTER_LOWPASS_SCHEMA = FILTER_SCHEMA.extend( { vol.Required(CONF_FILTER_NAME): FILTER_NAME_LOWPASS, vol.Optional(CONF_FILTER_WINDOW_SIZE, default=DEFAULT_WINDOW_SIZE): vol.Coerce( int ), vol.Optional( CONF_FILTER_TIME_CONSTANT, default=DEFAULT_FILTER_TIME_CONSTANT ): vol.Coerce(int), } ) FILTER_RANGE_SCHEMA = FILTER_SCHEMA.extend( { vol.Required(CONF_FILTER_NAME): FILTER_NAME_RANGE, vol.Optional(CONF_FILTER_LOWER_BOUND): vol.Coerce(float), vol.Optional(CONF_FILTER_UPPER_BOUND): vol.Coerce(float), } ) FILTER_TIME_SMA_SCHEMA = FILTER_SCHEMA.extend( { vol.Required(CONF_FILTER_NAME): FILTER_NAME_TIME_SMA, vol.Optional(CONF_TIME_SMA_TYPE, default=TIME_SMA_LAST): vol.In( [TIME_SMA_LAST] ), vol.Required(CONF_FILTER_WINDOW_SIZE): vol.All( cv.time_period, cv.positive_timedelta ), } ) FILTER_THROTTLE_SCHEMA = FILTER_SCHEMA.extend( { vol.Required(CONF_FILTER_NAME): FILTER_NAME_THROTTLE, vol.Optional(CONF_FILTER_WINDOW_SIZE, default=DEFAULT_WINDOW_SIZE): vol.Coerce( int ), } ) FILTER_TIME_THROTTLE_SCHEMA = FILTER_SCHEMA.extend( { vol.Required(CONF_FILTER_NAME): FILTER_NAME_TIME_THROTTLE, vol.Required(CONF_FILTER_WINDOW_SIZE): vol.All( cv.time_period, cv.positive_timedelta ), } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_FILTERS): vol.All( 
            cv.ensure_list,
            [
                vol.Any(
                    FILTER_OUTLIER_SCHEMA,
                    FILTER_LOWPASS_SCHEMA,
                    FILTER_TIME_SMA_SCHEMA,
                    FILTER_THROTTLE_SCHEMA,
                    FILTER_TIME_THROTTLE_SCHEMA,
                    FILTER_RANGE_SCHEMA,
                )
            ],
        ),
    }
)


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the filter sensors."""
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)

    name = config.get(CONF_NAME)
    entity_id = config.get(CONF_ENTITY_ID)

    filters = [
        FILTERS[_filter.pop(CONF_FILTER_NAME)](entity=entity_id, **_filter)
        for _filter in config[CONF_FILTERS]
    ]

    async_add_entities([SensorFilter(name, entity_id, filters)])


class SensorFilter(Entity):
    """Representation of a Filter Sensor."""

    def __init__(self, name, entity_id, filters):
        """Initialize the sensor."""
        self._name = name
        self._entity = entity_id
        self._unit_of_measurement = None
        self._state = None
        self._filters = filters
        self._icon = None

    @callback
    def _update_filter_sensor_state_event(self, event):
        """Handle device state changes."""
        self._update_filter_sensor_state(event.data.get("new_state"))

    @callback
    def _update_filter_sensor_state(self, new_state, update_ha=True):
        """Process device state changes."""
        if new_state is None or new_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
            return

        temp_state = new_state

        try:
            for filt in self._filters:
                filtered_state = filt.filter_state(copy(temp_state))
                _LOGGER.debug(
                    "%s(%s=%s) -> %s",
                    filt.name,
                    self._entity,
                    temp_state.state,
                    "skip" if filt.skip_processing else filtered_state.state,
                )
                if filt.skip_processing:
                    return
                temp_state = filtered_state
        except ValueError:
            _LOGGER.error("Could not convert state: %s to number", temp_state.state)
            return

        self._state = temp_state.state

        if self._icon is None:
            self._icon = new_state.attributes.get(ATTR_ICON, ICON)

        if self._unit_of_measurement is None:
            self._unit_of_measurement = new_state.attributes.get(
                ATTR_UNIT_OF_MEASUREMENT
            )

        if update_ha:
            self.async_write_ha_state()

    async def async_added_to_hass(self):
        """Register callbacks."""
        if "recorder" in self.hass.config.components:
            history_list = []
            largest_window_items = 0
            largest_window_time = timedelta(0)

            # Determine the largest window_size by type
            for filt in self._filters:
                if (
                    filt.window_unit == WINDOW_SIZE_UNIT_NUMBER_EVENTS
                    and largest_window_items < filt.window_size
                ):
                    largest_window_items = filt.window_size
                elif (
                    filt.window_unit == WINDOW_SIZE_UNIT_TIME
                    and largest_window_time < filt.window_size
                ):
                    largest_window_time = filt.window_size

            # Retrieve the largest window_size of each type
            if largest_window_items > 0:
                filter_history = await self.hass.async_add_job(
                    partial(
                        history.get_last_state_changes,
                        self.hass,
                        largest_window_items,
                        entity_id=self._entity,
                    )
                )
                if self._entity in filter_history:
                    history_list.extend(filter_history[self._entity])
            if largest_window_time > timedelta(seconds=0):
                start = dt_util.utcnow() - largest_window_time
                filter_history = await self.hass.async_add_job(
                    partial(
                        history.state_changes_during_period,
                        self.hass,
                        start,
                        entity_id=self._entity,
                    )
                )
                if self._entity in filter_history:
                    history_list.extend(
                        [
                            state
                            for state in filter_history[self._entity]
                            if state not in history_list
                        ]
                    )

            # Sort the window states
            history_list = sorted(history_list, key=lambda s: s.last_updated)
            _LOGGER.debug(
                "Loading from history: %s",
                [(s.state, s.last_updated) for s in history_list],
            )

            # Replay history through the filter chain
            for state in history_list:
                self._update_filter_sensor_state(state, False)

        self.async_on_remove(
            async_track_state_change_event(
                self.hass,
                [self._entity],
self._update_filter_sensor_state_event ) ) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Return the icon to use in the frontend, if any.""" return self._icon @property def unit_of_measurement(self): """Return the unit_of_measurement of the device.""" return self._unit_of_measurement @property def should_poll(self): """No polling needed.""" return False @property def device_state_attributes(self): """Return the state attributes of the sensor.""" state_attr = {ATTR_ENTITY_ID: self._entity} return state_attr class FilterState: """State abstraction for filter usage.""" def __init__(self, state): """Initialize with HA State object.""" self.timestamp = state.last_updated try: self.state = float(state.state) except ValueError: self.state = state.state def set_precision(self, precision): """Set precision of Number based states.""" if isinstance(self.state, Number): value = round(float(self.state), precision) self.state = int(value) if precision == 0 else value def __str__(self): """Return state as the string representation of FilterState.""" return str(self.state) def __repr__(self): """Return timestamp and state as the representation of FilterState.""" return f"{self.timestamp} : {self.state}" class Filter: """Filter skeleton.""" def __init__( self, name, window_size: int = 1, precision: Optional[int] = None, entity: Optional[str] = None, ): """Initialize common attributes. :param window_size: size of the sliding window that holds previous values :param precision: round filtered value to precision value :param entity: used for debugging only """ if isinstance(window_size, int): self.states = deque(maxlen=window_size) self.window_unit = WINDOW_SIZE_UNIT_NUMBER_EVENTS else: self.states = deque(maxlen=0) self.window_unit = WINDOW_SIZE_UNIT_TIME self.precision = precision self._name = name self._entity = entity self._skip_processing = False self._window_size = window_size self._store_raw = False self._only_numbers = True @property def window_size(self): """Return window size.""" return self._window_size @property def name(self): """Return filter name.""" return self._name @property def skip_processing(self): """Return whether the current filter_state should be skipped.""" return self._skip_processing def _filter_state(self, new_state): """Implement filter.""" raise NotImplementedError() def filter_state(self, new_state): """Implement a common interface for filters.""" fstate = FilterState(new_state) if self._only_numbers and not isinstance(fstate.state, Number): raise ValueError filtered = self._filter_state(fstate) filtered.set_precision(self.precision) if self._store_raw: self.states.append(copy(FilterState(new_state))) else: self.states.append(copy(filtered)) new_state.state = filtered.state return new_state @FILTERS.register(FILTER_NAME_RANGE) class RangeFilter(Filter): """Range filter. Determines if new state is in the range of upper_bound and lower_bound. If not inside, lower or upper bound is returned instead. """ def __init__( self, entity, precision: Optional[int] = DEFAULT_PRECISION, lower_bound: Optional[float] = None, upper_bound: Optional[float] = None, ): """Initialize Filter. 
:param upper_bound: band upper bound :param lower_bound: band lower bound """ super().__init__(FILTER_NAME_RANGE, precision=precision, entity=entity) self._lower_bound = lower_bound self._upper_bound = upper_bound self._stats_internal = Counter() def _filter_state(self, new_state): """Implement the range filter.""" if self._upper_bound is not None and new_state.state > self._upper_bound: self._stats_internal["erasures_up"] += 1 _LOGGER.debug( "Upper outlier nr. %s in %s: %s", self._stats_internal["erasures_up"], self._entity, new_state, ) new_state.state = self._upper_bound elif self._lower_bound is not None and new_state.state < self._lower_bound: self._stats_internal["erasures_low"] += 1 _LOGGER.debug( "Lower outlier nr. %s in %s: %s", self._stats_internal["erasures_low"], self._entity, new_state, ) new_state.state = self._lower_bound return new_state @FILTERS.register(FILTER_NAME_OUTLIER) class OutlierFilter(Filter): """BASIC outlier filter. Determines if new state is in a band around the median. """ def __init__(self, window_size, precision, entity, radius: float): """Initialize Filter. :param radius: band radius """ super().__init__(FILTER_NAME_OUTLIER, window_size, precision, entity) self._radius = radius self._stats_internal = Counter() self._store_raw = True def _filter_state(self, new_state): """Implement the outlier filter.""" median = statistics.median([s.state for s in self.states]) if self.states else 0 if ( len(self.states) == self.states.maxlen and abs(new_state.state - median) > self._radius ): self._stats_internal["erasures"] += 1 _LOGGER.debug( "Outlier nr. %s in %s: %s", self._stats_internal["erasures"], self._entity, new_state, ) new_state.state = median return new_state @FILTERS.register(FILTER_NAME_LOWPASS) class LowPassFilter(Filter): """BASIC Low Pass Filter.""" def __init__(self, window_size, precision, entity, time_constant: int): """Initialize Filter.""" super().__init__(FILTER_NAME_LOWPASS, window_size, precision, entity) self._time_constant = time_constant def _filter_state(self, new_state): """Implement the low pass filter.""" if not self.states: return new_state new_weight = 1.0 / self._time_constant prev_weight = 1.0 - new_weight new_state.state = ( prev_weight * self.states[-1].state + new_weight * new_state.state ) return new_state @FILTERS.register(FILTER_NAME_TIME_SMA) class TimeSMAFilter(Filter): """Simple Moving Average (SMA) Filter. The window_size is determined by time, and SMA is time weighted. """ def __init__( self, window_size, precision, entity, type ): # pylint: disable=redefined-builtin """Initialize Filter. 
:param type: type of algorithm used to connect discrete values """ super().__init__(FILTER_NAME_TIME_SMA, window_size, precision, entity) self._time_window = window_size self.last_leak = None self.queue = deque() def _leak(self, left_boundary): """Remove timeouted elements.""" while self.queue: if self.queue[0].timestamp + self._time_window <= left_boundary: self.last_leak = self.queue.popleft() else: return def _filter_state(self, new_state): """Implement the Simple Moving Average filter.""" self._leak(new_state.timestamp) self.queue.append(copy(new_state)) moving_sum = 0 start = new_state.timestamp - self._time_window prev_state = self.last_leak or self.queue[0] for state in self.queue: moving_sum += (state.timestamp - start).total_seconds() * prev_state.state start = state.timestamp prev_state = state new_state.state = moving_sum / self._time_window.total_seconds() return new_state @FILTERS.register(FILTER_NAME_THROTTLE) class ThrottleFilter(Filter): """Throttle Filter. One sample per window. """ def __init__(self, window_size, precision, entity): """Initialize Filter.""" super().__init__(FILTER_NAME_THROTTLE, window_size, precision, entity) self._only_numbers = False def _filter_state(self, new_state): """Implement the throttle filter.""" if not self.states or len(self.states) == self.states.maxlen: self.states.clear() self._skip_processing = False else: self._skip_processing = True return new_state @FILTERS.register(FILTER_NAME_TIME_THROTTLE) class TimeThrottleFilter(Filter): """Time Throttle Filter. One sample per time period. """ def __init__(self, window_size, precision, entity): """Initialize Filter.""" super().__init__(FILTER_NAME_TIME_THROTTLE, window_size, precision, entity) self._time_window = window_size self._last_emitted_at = None self._only_numbers = False def _filter_state(self, new_state): """Implement the filter.""" window_start = new_state.timestamp - self._time_window if not self._last_emitted_at or self._last_emitted_at <= window_start: self._last_emitted_at = new_state.timestamp self._skip_processing = False else: self._skip_processing = True return new_state
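As a usage sketch, the dict below is one configuration the PLATFORM_SCHEMA above would accept once Home Assistant has parsed the YAML; the entity id and numeric values are placeholders chosen for illustration. The lowpass stage applies the recurrence implemented by LowPassFilter above: out = (1 - 1/time_constant) * previous + (1/time_constant) * new.

FILTER_PLATFORM_CONFIG = {
    "sensor": [
        {
            "platform": "filter",
            "name": "filtered outside temperature",
            "entity_id": "sensor.outside_temperature",
            "filters": [
                # Clamp outliers against the median of the last 3 samples.
                {"filter": "outlier", "window_size": 3, "radius": 2.0},
                # Then smooth with a first-order low pass, rounded to 2 digits.
                {"filter": "lowpass", "time_constant": 10, "precision": 2},
            ],
        }
    ]
}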
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/filter/sensor.py
"""Support for Alexa skill service end point.""" import logging import voluptuous as vol from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_NAME from homeassistant.helpers import config_validation as cv, entityfilter from . import flash_briefings, intent, smart_home_http from .const import ( CONF_AUDIO, CONF_DESCRIPTION, CONF_DISPLAY_CATEGORIES, CONF_DISPLAY_URL, CONF_ENDPOINT, CONF_ENTITY_CONFIG, CONF_FILTER, CONF_LOCALE, CONF_PASSWORD, CONF_SUPPORTED_LOCALES, CONF_TEXT, CONF_TITLE, CONF_UID, DOMAIN, ) _LOGGER = logging.getLogger(__name__) CONF_FLASH_BRIEFINGS = "flash_briefings" CONF_SMART_HOME = "smart_home" DEFAULT_LOCALE = "en-US" ALEXA_ENTITY_SCHEMA = vol.Schema( { vol.Optional(CONF_DESCRIPTION): cv.string, vol.Optional(CONF_DISPLAY_CATEGORIES): cv.string, vol.Optional(CONF_NAME): cv.string, } ) SMART_HOME_SCHEMA = vol.Schema( { vol.Optional(CONF_ENDPOINT): cv.string, vol.Optional(CONF_CLIENT_ID): cv.string, vol.Optional(CONF_CLIENT_SECRET): cv.string, vol.Optional(CONF_LOCALE, default=DEFAULT_LOCALE): vol.In( CONF_SUPPORTED_LOCALES ), vol.Optional(CONF_FILTER, default={}): entityfilter.FILTER_SCHEMA, vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: ALEXA_ENTITY_SCHEMA}, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: { CONF_FLASH_BRIEFINGS: { vol.Required(CONF_PASSWORD): cv.string, cv.string: vol.All( cv.ensure_list, [ { vol.Optional(CONF_UID): cv.string, vol.Required(CONF_TITLE): cv.template, vol.Optional(CONF_AUDIO): cv.template, vol.Required(CONF_TEXT, default=""): cv.template, vol.Optional(CONF_DISPLAY_URL): cv.template, } ], ), }, # vol.Optional here would mean we couldn't distinguish between an empty # smart_home: and none at all. CONF_SMART_HOME: vol.Any(SMART_HOME_SCHEMA, None), } }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Activate the Alexa component.""" if DOMAIN not in config: return True config = config[DOMAIN] flash_briefings_config = config.get(CONF_FLASH_BRIEFINGS) intent.async_setup(hass) if flash_briefings_config: flash_briefings.async_setup(hass, flash_briefings_config) try: smart_home_config = config[CONF_SMART_HOME] except KeyError: pass else: smart_home_config = smart_home_config or SMART_HOME_SCHEMA({}) await smart_home_http.async_setup(hass, smart_home_config) return True
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/alexa/__init__.py
"""Support for LCN lights.""" import pypck from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_TRANSITION, SUPPORT_BRIGHTNESS, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.const import CONF_ADDRESS from . import LcnDevice from .const import ( CONF_CONNECTIONS, CONF_DIMMABLE, CONF_OUTPUT, CONF_TRANSITION, DATA_LCN, OUTPUT_PORTS, ) from .helpers import get_connection async def async_setup_platform( hass, hass_config, async_add_entities, discovery_info=None ): """Set up the LCN light platform.""" if discovery_info is None: return devices = [] for config in discovery_info: address, connection_id = config[CONF_ADDRESS] addr = pypck.lcn_addr.LcnAddr(*address) connections = hass.data[DATA_LCN][CONF_CONNECTIONS] connection = get_connection(connections, connection_id) address_connection = connection.get_address_conn(addr) if config[CONF_OUTPUT] in OUTPUT_PORTS: device = LcnOutputLight(config, address_connection) else: # in RELAY_PORTS device = LcnRelayLight(config, address_connection) devices.append(device) async_add_entities(devices) class LcnOutputLight(LcnDevice, LightEntity): """Representation of a LCN light for output ports.""" def __init__(self, config, address_connection): """Initialize the LCN light.""" super().__init__(config, address_connection) self.output = pypck.lcn_defs.OutputPort[config[CONF_OUTPUT]] self._transition = pypck.lcn_defs.time_to_ramp_value(config[CONF_TRANSITION]) self.dimmable = config[CONF_DIMMABLE] self._brightness = 255 self._is_on = None self._is_dimming_to_zero = False async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.output) @property def supported_features(self): """Flag supported features.""" features = SUPPORT_TRANSITION if self.dimmable: features |= SUPPORT_BRIGHTNESS return features @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def is_on(self): """Return True if entity is on.""" return self._is_on async def async_turn_on(self, **kwargs): """Turn the entity on.""" self._is_on = True self._is_dimming_to_zero = False if ATTR_BRIGHTNESS in kwargs: percent = int(kwargs[ATTR_BRIGHTNESS] / 255.0 * 100) else: percent = 100 if ATTR_TRANSITION in kwargs: transition = pypck.lcn_defs.time_to_ramp_value( kwargs[ATTR_TRANSITION] * 1000 ) else: transition = self._transition self.address_connection.dim_output(self.output.value, percent, transition) self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the entity off.""" self._is_on = False if ATTR_TRANSITION in kwargs: transition = pypck.lcn_defs.time_to_ramp_value( kwargs[ATTR_TRANSITION] * 1000 ) else: transition = self._transition self._is_dimming_to_zero = bool(transition) self.address_connection.dim_output(self.output.value, 0, transition) self.async_write_ha_state() def input_received(self, input_obj): """Set light state when LCN input object (command) is received.""" if ( not isinstance(input_obj, pypck.inputs.ModStatusOutput) or input_obj.get_output_id() != self.output.value ): return self._brightness = int(input_obj.get_percent() / 100.0 * 255) if self.brightness == 0: self._is_dimming_to_zero = False if not self._is_dimming_to_zero: self._is_on = self.brightness > 0 self.async_write_ha_state() class LcnRelayLight(LcnDevice, LightEntity): """Representation of a LCN light for relay ports.""" def __init__(self, config, address_connection): """Initialize the 
LCN light.""" super().__init__(config, address_connection) self.output = pypck.lcn_defs.RelayPort[config[CONF_OUTPUT]] self._is_on = None async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.output) @property def is_on(self): """Return True if entity is on.""" return self._is_on async def async_turn_on(self, **kwargs): """Turn the entity on.""" self._is_on = True states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8 states[self.output.value] = pypck.lcn_defs.RelayStateModifier.ON self.address_connection.control_relays(states) self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the entity off.""" self._is_on = False states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8 states[self.output.value] = pypck.lcn_defs.RelayStateModifier.OFF self.address_connection.control_relays(states) self.async_write_ha_state() def input_received(self, input_obj): """Set light state when LCN input object (command) is received.""" if not isinstance(input_obj, pypck.inputs.ModStatusRelays): return self._is_on = input_obj.get_state(self.output.value) self.async_write_ha_state()
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/lcn/light.py
"""Support for AlarmDecoder devices.""" import asyncio from datetime import timedelta import logging from adext import AdExt from alarmdecoder.devices import SerialDevice, SocketDevice from alarmdecoder.util import NoDeviceError from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PORT, CONF_PROTOCOL, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util import dt as dt_util from .const import ( CONF_DEVICE_BAUD, CONF_DEVICE_PATH, DATA_AD, DATA_REMOVE_STOP_LISTENER, DATA_REMOVE_UPDATE_LISTENER, DATA_RESTART, DOMAIN, PROTOCOL_SERIAL, PROTOCOL_SOCKET, SIGNAL_PANEL_MESSAGE, SIGNAL_REL_MESSAGE, SIGNAL_RFX_MESSAGE, SIGNAL_ZONE_FAULT, SIGNAL_ZONE_RESTORE, ) _LOGGER = logging.getLogger(__name__) PLATFORMS = ["alarm_control_panel", "sensor", "binary_sensor"] async def async_setup(hass, config): """Set up for the AlarmDecoder devices.""" return True async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: """Set up AlarmDecoder config flow.""" undo_listener = entry.add_update_listener(_update_listener) ad_connection = entry.data protocol = ad_connection[CONF_PROTOCOL] def stop_alarmdecoder(event): """Handle the shutdown of AlarmDecoder.""" if not hass.data.get(DOMAIN): return _LOGGER.debug("Shutting down alarmdecoder") hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False controller.close() async def open_connection(now=None): """Open a connection to AlarmDecoder.""" try: await hass.async_add_executor_job(controller.open, baud) except NoDeviceError: _LOGGER.debug("Failed to connect. Retrying in 5 seconds") hass.helpers.event.async_track_point_in_time( open_connection, dt_util.utcnow() + timedelta(seconds=5) ) return _LOGGER.debug("Established a connection with the alarmdecoder") hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = True def handle_closed_connection(event): """Restart after unexpected loss of connection.""" if not hass.data[DOMAIN][entry.entry_id][DATA_RESTART]: return hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False _LOGGER.warning("AlarmDecoder unexpectedly lost connection") hass.add_job(open_connection) def handle_message(sender, message): """Handle message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_PANEL_MESSAGE, message) def handle_rfx_message(sender, message): """Handle RFX message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_RFX_MESSAGE, message) def zone_fault_callback(sender, zone): """Handle zone fault from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_FAULT, zone) def zone_restore_callback(sender, zone): """Handle zone restore from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_RESTORE, zone) def handle_rel_message(sender, message): """Handle relay or zone expander message from AlarmDecoder.""" hass.helpers.dispatcher.dispatcher_send(SIGNAL_REL_MESSAGE, message) baud = ad_connection.get(CONF_DEVICE_BAUD) if protocol == PROTOCOL_SOCKET: host = ad_connection[CONF_HOST] port = ad_connection[CONF_PORT] controller = AdExt(SocketDevice(interface=(host, port))) if protocol == PROTOCOL_SERIAL: path = ad_connection[CONF_DEVICE_PATH] controller = AdExt(SerialDevice(interface=path)) controller.on_message += handle_message controller.on_rfx_message += handle_rfx_message controller.on_zone_fault += zone_fault_callback controller.on_zone_restore += zone_restore_callback controller.on_close += handle_closed_connection controller.on_expander_message += 
handle_rel_message

    remove_stop_listener = hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, stop_alarmdecoder
    )

    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][entry.entry_id] = {
        DATA_AD: controller,
        DATA_REMOVE_UPDATE_LISTENER: undo_listener,
        DATA_REMOVE_STOP_LISTENER: remove_stop_listener,
        DATA_RESTART: False,
    }

    await open_connection()

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True


async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Unload an AlarmDecoder entry."""
    hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False

    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, component)
                for component in PLATFORMS
            ]
        )
    )

    if not unload_ok:
        return False

    hass.data[DOMAIN][entry.entry_id][DATA_REMOVE_UPDATE_LISTENER]()
    hass.data[DOMAIN][entry.entry_id][DATA_REMOVE_STOP_LISTENER]()
    await hass.async_add_executor_job(hass.data[DOMAIN][entry.entry_id][DATA_AD].close)

    if hass.data[DOMAIN][entry.entry_id]:
        hass.data[DOMAIN].pop(entry.entry_id)

    if not hass.data[DOMAIN]:
        hass.data.pop(DOMAIN)

    return True


async def _update_listener(hass: HomeAssistantType, entry: ConfigEntry):
    """Handle options update."""
    _LOGGER.debug("AlarmDecoder options updated: %s", entry.as_dict()["options"])
    await hass.config_entries.async_reload(entry.entry_id)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/alarmdecoder/__init__.py
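In async_setup_entry above, open_connection retries a failed controller.open every five seconds by re-scheduling itself with async_track_point_in_time. Here is a standalone asyncio sketch of the same retry shape; it is hass-free, the names are illustrative, and it catches OSError where the component catches NoDeviceError.

import asyncio


async def open_with_retry(connect, delay=5.0, max_attempts=None):
    """Run the blocking `connect` callable off the event loop and retry
    every `delay` seconds on failure, like open_connection() above
    (which delegates the blocking open to hass.async_add_executor_job)."""
    attempt = 0
    while True:
        attempt += 1
        try:
            return await asyncio.get_running_loop().run_in_executor(None, connect)
        except OSError:
            if max_attempts is not None and attempt >= max_attempts:
                raise
            await asyncio.sleep(delay)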
"""Support for Plaato Airlock.""" import logging from aiohttp import web import voluptuous as vol from homeassistant.components.sensor import DOMAIN as SENSOR from homeassistant.const import ( CONF_WEBHOOK_ID, HTTP_OK, TEMP_CELSIUS, TEMP_FAHRENHEIT, VOLUME_GALLONS, VOLUME_LITERS, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import DOMAIN _LOGGER = logging.getLogger(__name__) DEPENDENCIES = ["webhook"] PLAATO_DEVICE_SENSORS = "sensors" PLAATO_DEVICE_ATTRS = "attrs" ATTR_DEVICE_ID = "device_id" ATTR_DEVICE_NAME = "device_name" ATTR_TEMP_UNIT = "temp_unit" ATTR_VOLUME_UNIT = "volume_unit" ATTR_BPM = "bpm" ATTR_TEMP = "temp" ATTR_SG = "sg" ATTR_OG = "og" ATTR_BUBBLES = "bubbles" ATTR_ABV = "abv" ATTR_CO2_VOLUME = "co2_volume" ATTR_BATCH_VOLUME = "batch_volume" SENSOR_UPDATE = f"{DOMAIN}_sensor_update" SENSOR_DATA_KEY = f"{DOMAIN}.{SENSOR}" WEBHOOK_SCHEMA = vol.Schema( { vol.Required(ATTR_DEVICE_NAME): cv.string, vol.Required(ATTR_DEVICE_ID): cv.positive_int, vol.Required(ATTR_TEMP_UNIT): vol.Any(TEMP_CELSIUS, TEMP_FAHRENHEIT), vol.Required(ATTR_VOLUME_UNIT): vol.Any(VOLUME_LITERS, VOLUME_GALLONS), vol.Required(ATTR_BPM): cv.positive_int, vol.Required(ATTR_TEMP): vol.Coerce(float), vol.Required(ATTR_SG): vol.Coerce(float), vol.Required(ATTR_OG): vol.Coerce(float), vol.Required(ATTR_ABV): vol.Coerce(float), vol.Required(ATTR_CO2_VOLUME): vol.Coerce(float), vol.Required(ATTR_BATCH_VOLUME): vol.Coerce(float), vol.Required(ATTR_BUBBLES): cv.positive_int, }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, hass_config): """Set up the Plaato component.""" return True async def async_setup_entry(hass, entry): """Configure based on config entry.""" if DOMAIN not in hass.data: hass.data[DOMAIN] = {} webhook_id = entry.data[CONF_WEBHOOK_ID] hass.components.webhook.async_register(DOMAIN, "Plaato", webhook_id, handle_webhook) hass.async_create_task(hass.config_entries.async_forward_entry_setup(entry, SENSOR)) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) hass.data[SENSOR_DATA_KEY]() await hass.config_entries.async_forward_entry_unload(entry, SENSOR) return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Plaato.""" try: data = WEBHOOK_SCHEMA(await request.json()) except vol.MultipleInvalid as error: _LOGGER.warning("An error occurred when parsing webhook data <%s>", error) return device_id = _device_id(data) attrs = { ATTR_DEVICE_NAME: data.get(ATTR_DEVICE_NAME), ATTR_DEVICE_ID: data.get(ATTR_DEVICE_ID), ATTR_TEMP_UNIT: data.get(ATTR_TEMP_UNIT), ATTR_VOLUME_UNIT: data.get(ATTR_VOLUME_UNIT), } sensors = { ATTR_TEMP: data.get(ATTR_TEMP), ATTR_BPM: data.get(ATTR_BPM), ATTR_SG: data.get(ATTR_SG), ATTR_OG: data.get(ATTR_OG), ATTR_ABV: data.get(ATTR_ABV), ATTR_CO2_VOLUME: data.get(ATTR_CO2_VOLUME), ATTR_BATCH_VOLUME: data.get(ATTR_BATCH_VOLUME), ATTR_BUBBLES: data.get(ATTR_BUBBLES), } hass.data[DOMAIN][device_id] = { PLAATO_DEVICE_ATTRS: attrs, PLAATO_DEVICE_SENSORS: sensors, } async_dispatcher_send(hass, SENSOR_UPDATE, device_id) return web.Response(text=f"Saving status for {device_id}", status=HTTP_OK) def _device_id(data): """Return name of device sensor.""" return f"{data.get(ATTR_DEVICE_NAME)}_{data.get(ATTR_DEVICE_ID)}"
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/plaato/__init__.py
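WEBHOOK_SCHEMA above requires every sensor field and coerces the numeric ones. An illustrative payload that should validate is sketched below; the field names come from the schema, the values themselves are invented, and "°C"/"L" are the literal values of TEMP_CELSIUS and VOLUME_LITERS.

payload = {
    "device_name": "fermenter",   # illustrative
    "device_id": 42,
    "temp_unit": "°C",            # TEMP_CELSIUS
    "volume_unit": "L",           # VOLUME_LITERS
    "bpm": 33,
    "temp": 18.5,
    "sg": 1.042,
    "og": 1.054,
    "abv": 1.6,
    "co2_volume": 0.9,
    "batch_volume": 20.0,
    "bubbles": 12345,
}
# data = WEBHOOK_SCHEMA(payload)  # returns the coerced mapping, or raises
# vol.MultipleInvalid, which handle_webhook() above logs and ignores.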
"""Support for the Pico TTS speech service.""" import logging import os import shutil import subprocess import tempfile import voluptuous as vol from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider _LOGGER = logging.getLogger(__name__) SUPPORT_LANGUAGES = ["en-US", "en-GB", "de-DE", "es-ES", "fr-FR", "it-IT"] DEFAULT_LANG = "en-US" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) def get_engine(hass, config, discovery_info=None): """Set up Pico speech component.""" if shutil.which("pico2wave") is None: _LOGGER.error("'pico2wave' was not found") return False return PicoProvider(config[CONF_LANG]) class PicoProvider(Provider): """The Pico TTS API provider.""" def __init__(self, lang): """Initialize Pico TTS provider.""" self._lang = lang self.name = "PicoTTS" @property def default_language(self): """Return the default language.""" return self._lang @property def supported_languages(self): """Return list of supported languages.""" return SUPPORT_LANGUAGES def get_tts_audio(self, message, language, options=None): """Load TTS using pico2wave.""" with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as tmpf: fname = tmpf.name cmd = ["pico2wave", "--wave", fname, "-l", language, message] subprocess.call(cmd) data = None try: with open(fname, "rb") as voice: data = voice.read() except OSError: _LOGGER.error("Error trying to read %s", fname) return (None, None) finally: os.remove(fname) if data: return ("wav", data) return (None, None)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/picotts/tts.py
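get_tts_audio above reserves a temp path with NamedTemporaryFile(delete=False), shells out to pico2wave, reads the bytes back, and always removes the file. A generic, standalone sketch of that pattern follows; run_tool_to_tempfile and make_cmd are illustrative names, not part of the platform.

import os
import subprocess
import tempfile


def run_tool_to_tempfile(make_cmd, suffix=".wav"):
    """Reserve a temp path, let a CLI tool write to it, read the bytes
    back and always delete the file -- the shape of get_tts_audio()
    above. `make_cmd` maps the path to an argv list."""
    with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmpf:
        fname = tmpf.name
    try:
        subprocess.call(make_cmd(fname))
        with open(fname, "rb") as outfile:
            return outfile.read()
    finally:
        os.remove(fname)


# e.g. run_tool_to_tempfile(lambda f: ["pico2wave", "--wave", f, "-l", "en-US", "hi"])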
"""Support for Dominos Pizza ordering.""" from datetime import timedelta import logging from pizzapi import Address, Customer, Order from pizzapi.address import StoreException import voluptuous as vol from homeassistant.components import http from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) # The domain of your component. Should be equal to the name of your component. DOMAIN = "dominos" ENTITY_ID_FORMAT = DOMAIN + ".{}" ATTR_COUNTRY = "country_code" ATTR_FIRST_NAME = "first_name" ATTR_LAST_NAME = "last_name" ATTR_EMAIL = "email" ATTR_PHONE = "phone" ATTR_ADDRESS = "address" ATTR_ORDERS = "orders" ATTR_SHOW_MENU = "show_menu" ATTR_ORDER_ENTITY = "order_entity_id" ATTR_ORDER_NAME = "name" ATTR_ORDER_CODES = "codes" MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) MIN_TIME_BETWEEN_STORE_UPDATES = timedelta(minutes=3330) _ORDERS_SCHEMA = vol.Schema( { vol.Required(ATTR_ORDER_NAME): cv.string, vol.Required(ATTR_ORDER_CODES): vol.All(cv.ensure_list, [cv.string]), } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(ATTR_COUNTRY): cv.string, vol.Required(ATTR_FIRST_NAME): cv.string, vol.Required(ATTR_LAST_NAME): cv.string, vol.Required(ATTR_EMAIL): cv.string, vol.Required(ATTR_PHONE): cv.string, vol.Required(ATTR_ADDRESS): cv.string, vol.Optional(ATTR_SHOW_MENU): cv.boolean, vol.Optional(ATTR_ORDERS, default=[]): vol.All( cv.ensure_list, [_ORDERS_SCHEMA] ), } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up is called when Home Assistant is loading our component.""" dominos = Dominos(hass, config) component = EntityComponent(_LOGGER, DOMAIN, hass) hass.data[DOMAIN] = {} entities = [] conf = config[DOMAIN] hass.services.register(DOMAIN, "order", dominos.handle_order) if conf.get(ATTR_SHOW_MENU): hass.http.register_view(DominosProductListView(dominos)) for order_info in conf.get(ATTR_ORDERS): order = DominosOrder(order_info, dominos) entities.append(order) if entities: component.add_entities(entities) # Return boolean to indicate that initialization was successfully. return True class Dominos: """Main Dominos service.""" def __init__(self, hass, config): """Set up main service.""" conf = config[DOMAIN] self.hass = hass self.customer = Customer( conf.get(ATTR_FIRST_NAME), conf.get(ATTR_LAST_NAME), conf.get(ATTR_EMAIL), conf.get(ATTR_PHONE), conf.get(ATTR_ADDRESS), ) self.address = Address( *self.customer.address.split(","), country=conf.get(ATTR_COUNTRY) ) self.country = conf.get(ATTR_COUNTRY) try: self.closest_store = self.address.closest_store() except StoreException: self.closest_store = None def handle_order(self, call): """Handle ordering pizza.""" entity_ids = call.data.get(ATTR_ORDER_ENTITY) target_orders = [ order for order in self.hass.data[DOMAIN]["entities"] if order.entity_id in entity_ids ] for order in target_orders: order.place() @Throttle(MIN_TIME_BETWEEN_STORE_UPDATES) def update_closest_store(self): """Update the shared closest store (if open).""" try: self.closest_store = self.address.closest_store() return True except StoreException: self.closest_store = None return False def get_menu(self): """Return the products from the closest stores menu.""" self.update_closest_store() if self.closest_store is None: _LOGGER.warning("Cannot get menu. 
Store may be closed") return [] menu = self.closest_store.get_menu() product_entries = [] for product in menu.products: item = {} if isinstance(product.menu_data["Variants"], list): variants = ", ".join(product.menu_data["Variants"]) else: variants = product.menu_data["Variants"] item["name"] = product.name item["variants"] = variants product_entries.append(item) return product_entries class DominosProductListView(http.HomeAssistantView): """View to retrieve product list content.""" url = "/api/dominos" name = "api:dominos" def __init__(self, dominos): """Initialize suite view.""" self.dominos = dominos @callback def get(self, request): """Retrieve if API is running.""" return self.json(self.dominos.get_menu()) class DominosOrder(Entity): """Represents a Dominos order entity.""" def __init__(self, order_info, dominos): """Set up the entity.""" self._name = order_info["name"] self._product_codes = order_info["codes"] self._orderable = False self.dominos = dominos @property def name(self): """Return the orders name.""" return self._name @property def product_codes(self): """Return the orders product codes.""" return self._product_codes @property def orderable(self): """Return the true if orderable.""" return self._orderable @property def state(self): """Return the state either closed, orderable or unorderable.""" if self.dominos.closest_store is None: return "closed" return "orderable" if self._orderable else "unorderable" @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Update the order state and refreshes the store.""" try: self.dominos.update_closest_store() except StoreException: self._orderable = False return try: order = self.order() order.pay_with() self._orderable = True except StoreException: self._orderable = False def order(self): """Create the order object.""" if self.dominos.closest_store is None: raise StoreException order = Order( self.dominos.closest_store, self.dominos.customer, self.dominos.address, self.dominos.country, ) for code in self._product_codes: order.add_item(code) return order def place(self): """Place the order.""" try: order = self.order() order.place() except StoreException: self._orderable = False _LOGGER.warning( "Attempted to order Dominos - Order invalid or store closed" )
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/dominos/__init__.py
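update_closest_store above is rate-limited with Throttle(MIN_TIME_BETWEEN_STORE_UPDATES). A simplified stand-in for that decorator is sketched below; the real homeassistant.util.Throttle is more featureful, but it shares the key behaviour that a call made before the interval has elapsed returns None instead of running.

import time
from functools import wraps


def throttle(min_interval_s):
    """Simplified stand-in for homeassistant.util.Throttle."""

    def decorator(func):
        last_call = [0.0]

        @wraps(func)
        def wrapper(*args, **kwargs):
            now = time.monotonic()
            if now - last_call[0] < min_interval_s:
                return None  # throttled, as with the real decorator
            last_call[0] = now
            return func(*args, **kwargs)

        return wrapper

    return decorator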
"""Support for Vera switches.""" import logging from typing import Any, Callable, List, Optional import pyvera as veraApi from homeassistant.components.switch import ( DOMAIN as PLATFORM_DOMAIN, ENTITY_ID_FORMAT, SwitchEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.util import convert from . import VeraDevice from .common import ControllerData, get_controller_data _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up the sensor config entry.""" controller_data = get_controller_data(hass, entry) async_add_entities( [ VeraSwitch(device, controller_data) for device in controller_data.devices.get(PLATFORM_DOMAIN) ] ) class VeraSwitch(VeraDevice[veraApi.VeraSwitch], SwitchEntity): """Representation of a Vera Switch.""" def __init__( self, vera_device: veraApi.VeraSwitch, controller_data: ControllerData ): """Initialize the Vera device.""" self._state = False VeraDevice.__init__(self, vera_device, controller_data) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) def turn_on(self, **kwargs: Any) -> None: """Turn device on.""" self.vera_device.switch_on() self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs: Any) -> None: """Turn device off.""" self.vera_device.switch_off() self._state = False self.schedule_update_ha_state() @property def current_power_w(self) -> Optional[float]: """Return the current power usage in W.""" power = self.vera_device.power if power: return convert(power, float, 0.0) @property def is_on(self) -> bool: """Return true if device is on.""" return self._state def update(self) -> None: """Update device state.""" self._state = self.vera_device.is_switched_on()
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/vera/switch.py
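The Vera switch in this row updates optimistically: turn_on and turn_off send the command and flip the cached _state immediately, and the polled update() later reconciles that assumption against the device. A minimal, self-contained sketch of the same pattern, using a MagicMock as a stand-in device (the OptimisticSwitch class is illustrative, not part of the integration):

from unittest.mock import MagicMock

class OptimisticSwitch:
    """Toy switch mirroring the optimistic-state pattern used above."""

    def __init__(self, device):
        self._device = device
        self._state = False

    def turn_on(self):
        self._device.switch_on()  # send the command first...
        self._state = True        # ...then assume it succeeded

    def update(self):
        # The periodic poll reconciles the assumed state with the device.
        self._state = self._device.is_switched_on()

device = MagicMock(is_switched_on=MagicMock(return_value=True))
switch = OptimisticSwitch(device)
switch.turn_on()
device.switch_on.assert_called_once()
assert switch._state is True
switch.update()
device.is_switched_on.assert_called_once()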
"""Support for Genius Hub switch/outlet devices.""" from homeassistant.components.switch import DEVICE_CLASS_OUTLET, SwitchEntity from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import DOMAIN, GeniusZone ATTR_DURATION = "duration" GH_ON_OFF_ZONE = "on / off" async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Set up the Genius Hub switch entities.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] async_add_entities( [ GeniusSwitch(broker, z) for z in broker.client.zone_objs if z.data["type"] == GH_ON_OFF_ZONE ] ) class GeniusSwitch(GeniusZone, SwitchEntity): """Representation of a Genius Hub switch.""" @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_OUTLET @property def is_on(self) -> bool: """Return the current state of the on/off zone. The zone is considered 'on' if & only if it is override/on (e.g. timer/on is 'off'). """ return self._zone.data["mode"] == "override" and self._zone.data["setpoint"] async def async_turn_off(self, **kwargs) -> None: """Send the zone to Timer mode. The zone is deemed 'off' in this mode, although the plugs may actually be on. """ await self._zone.set_mode("timer") async def async_turn_on(self, **kwargs) -> None: """Set the zone to override/on ({'setpoint': true}) for x seconds.""" await self._zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/geniushub/switch.py
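The Genius Hub switch in this row reports "on" only when the zone is in override mode with a truthy setpoint, and its async_turn_on sets an override whose duration defaults to one hour. A minimal sketch of that turn-on path, assuming an AsyncMock stand-in for the zone (the free-standing turn_on helper is illustrative):

import asyncio
from unittest.mock import AsyncMock

ATTR_DURATION = "duration"

async def turn_on(zone, **kwargs):
    # Mirrors GeniusSwitch.async_turn_on: override/on for the given
    # number of seconds, defaulting to one hour.
    await zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))

zone = AsyncMock()
asyncio.run(turn_on(zone, duration=120))
zone.set_override.assert_awaited_once_with(1, 120)

zone = AsyncMock()
asyncio.run(turn_on(zone))
zone.set_override.assert_awaited_once_with(1, 3600)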
"""Provide configuration end points for Groups.""" from homeassistant.components.group import DOMAIN, GROUP_SCHEMA from homeassistant.config import GROUP_CONFIG_PATH from homeassistant.const import SERVICE_RELOAD import homeassistant.helpers.config_validation as cv from . import EditKeyBasedConfigView async def async_setup(hass): """Set up the Group config API.""" async def hook(action, config_key): """post_write_hook for Config View that reloads groups.""" await hass.services.async_call(DOMAIN, SERVICE_RELOAD) hass.http.register_view( EditKeyBasedConfigView( "group", "config", GROUP_CONFIG_PATH, cv.slug, GROUP_SCHEMA, post_write_hook=hook, ) ) return True
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/config/group.py
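The group config endpoint in this row wires a post_write_hook into the edit view so every successful write reloads the group integration. A minimal sketch of that hook contract, assuming SERVICE_RELOAD resolves to the string "reload" and using an AsyncMock in place of hass:

import asyncio
from unittest.mock import AsyncMock

DOMAIN = "group"
SERVICE_RELOAD = "reload"  # assumed value of homeassistant.const.SERVICE_RELOAD

async def hook(hass, action, config_key):
    # Mirrors the post_write_hook above: regardless of which key was
    # edited, ask the group integration to reload its config.
    await hass.services.async_call(DOMAIN, SERVICE_RELOAD)

hass = AsyncMock()
asyncio.run(hook(hass, "update", "living_room"))
hass.services.async_call.assert_awaited_once_with("group", "reload")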
"""Support for LCN climate control.""" import pypck from homeassistant.components.climate import ClimateEntity, const from homeassistant.const import ATTR_TEMPERATURE, CONF_ADDRESS, CONF_UNIT_OF_MEASUREMENT from . import LcnDevice from .const import ( CONF_CONNECTIONS, CONF_LOCKABLE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_SETPOINT, CONF_SOURCE, DATA_LCN, ) from .helpers import get_connection async def async_setup_platform( hass, hass_config, async_add_entities, discovery_info=None ): """Set up the LCN climate platform.""" if discovery_info is None: return devices = [] for config in discovery_info: address, connection_id = config[CONF_ADDRESS] addr = pypck.lcn_addr.LcnAddr(*address) connections = hass.data[DATA_LCN][CONF_CONNECTIONS] connection = get_connection(connections, connection_id) address_connection = connection.get_address_conn(addr) devices.append(LcnClimate(config, address_connection)) async_add_entities(devices) class LcnClimate(LcnDevice, ClimateEntity): """Representation of a LCN climate device.""" def __init__(self, config, address_connection): """Initialize of a LCN climate device.""" super().__init__(config, address_connection) self.variable = pypck.lcn_defs.Var[config[CONF_SOURCE]] self.setpoint = pypck.lcn_defs.Var[config[CONF_SETPOINT]] self.unit = pypck.lcn_defs.VarUnit.parse(config[CONF_UNIT_OF_MEASUREMENT]) self.regulator_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint) self.is_lockable = config[CONF_LOCKABLE] self._max_temp = config[CONF_MAX_TEMP] self._min_temp = config[CONF_MIN_TEMP] self._current_temperature = None self._target_temperature = None self._is_on = None async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.variable) await self.address_connection.activate_status_request_handler(self.setpoint) @property def supported_features(self): """Return the list of supported features.""" return const.SUPPORT_TARGET_TEMPERATURE @property def temperature_unit(self): """Return the unit of measurement.""" return self.unit.value @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature @property def hvac_mode(self): """Return hvac operation ie. heat, cool mode. Need to be one of HVAC_MODE_*. """ if self._is_on: return const.HVAC_MODE_HEAT return const.HVAC_MODE_OFF @property def hvac_modes(self): """Return the list of available hvac operation modes. Need to be a subset of HVAC_MODES. 
""" modes = [const.HVAC_MODE_HEAT] if self.is_lockable: modes.append(const.HVAC_MODE_OFF) return modes @property def max_temp(self): """Return the maximum temperature.""" return self._max_temp @property def min_temp(self): """Return the minimum temperature.""" return self._min_temp async def async_set_hvac_mode(self, hvac_mode): """Set new target hvac mode.""" if hvac_mode == const.HVAC_MODE_HEAT: self._is_on = True self.address_connection.lock_regulator(self.regulator_id, False) elif hvac_mode == const.HVAC_MODE_OFF: self._is_on = False self.address_connection.lock_regulator(self.regulator_id, True) self._target_temperature = None self.async_write_ha_state() async def async_set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if temperature is None: return self._target_temperature = temperature self.address_connection.var_abs( self.setpoint, self._target_temperature, self.unit ) self.async_write_ha_state() def input_received(self, input_obj): """Set temperature value when LCN input object is received.""" if not isinstance(input_obj, pypck.inputs.ModStatusVar): return if input_obj.get_var() == self.variable: self._current_temperature = input_obj.get_value().to_var_unit(self.unit) elif input_obj.get_var() == self.setpoint: self._is_on = not input_obj.get_value().is_locked_regulator() if self._is_on: self._target_temperature = input_obj.get_value().to_var_unit(self.unit) self.async_write_ha_state()
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/lcn/climate.py
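The LCN climate entity in this row has no native off state; it emulates HVAC_MODE_OFF by locking the temperature regulator and HVAC_MODE_HEAT by unlocking it. A small sketch of that mode-to-lock mapping on its own (the helper function and string constants are illustrative stand-ins for the pypck calls):

HVAC_MODE_HEAT = "heat"
HVAC_MODE_OFF = "off"

def regulator_locked(hvac_mode):
    # Mirrors async_set_hvac_mode above: OFF locks the regulator,
    # HEAT unlocks it.
    if hvac_mode == HVAC_MODE_HEAT:
        return False
    if hvac_mode == HVAC_MODE_OFF:
        return True
    raise ValueError(f"unsupported hvac_mode: {hvac_mode}")

assert regulator_locked(HVAC_MODE_HEAT) is False
assert regulator_locked(HVAC_MODE_OFF) is True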
"""Support for HomeMatic binary sensors.""" import logging from homeassistant.components.binary_sensor import ( DEVICE_CLASS_BATTERY, DEVICE_CLASS_MOTION, DEVICE_CLASS_OPENING, DEVICE_CLASS_PRESENCE, DEVICE_CLASS_SMOKE, BinarySensorEntity, ) from .const import ATTR_DISCOVER_DEVICES, ATTR_DISCOVERY_TYPE, DISCOVER_BATTERY from .entity import HMDevice _LOGGER = logging.getLogger(__name__) SENSOR_TYPES_CLASS = { "IPShutterContact": DEVICE_CLASS_OPENING, "IPShutterContactSabotage": DEVICE_CLASS_OPENING, "MaxShutterContact": DEVICE_CLASS_OPENING, "Motion": DEVICE_CLASS_MOTION, "MotionV2": DEVICE_CLASS_MOTION, "PresenceIP": DEVICE_CLASS_PRESENCE, "Remote": None, "RemoteMotion": None, "ShutterContact": DEVICE_CLASS_OPENING, "Smoke": DEVICE_CLASS_SMOKE, "SmokeV2": DEVICE_CLASS_SMOKE, "TiltSensor": None, "WeatherSensor": None, "IPContact": DEVICE_CLASS_OPENING, "MotionIPV2": DEVICE_CLASS_MOTION, "IPRemoteMotionV2": DEVICE_CLASS_MOTION, } def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the HomeMatic binary sensor platform.""" if discovery_info is None: return devices = [] for conf in discovery_info[ATTR_DISCOVER_DEVICES]: if discovery_info[ATTR_DISCOVERY_TYPE] == DISCOVER_BATTERY: devices.append(HMBatterySensor(conf)) else: devices.append(HMBinarySensor(conf)) add_entities(devices, True) class HMBinarySensor(HMDevice, BinarySensorEntity): """Representation of a binary HomeMatic device.""" @property def is_on(self): """Return true if switch is on.""" if not self.available: return False return bool(self._hm_get_state()) @property def device_class(self): """Return the class of this sensor from DEVICE_CLASSES.""" # If state is MOTION (Only RemoteMotion working) if self._state == "MOTION": return DEVICE_CLASS_MOTION return SENSOR_TYPES_CLASS.get(self._hmdevice.__class__.__name__) def _init_data_struct(self): """Generate the data dictionary (self._data) from metadata.""" # Add state to data struct if self._state: self._data.update({self._state: None}) class HMBatterySensor(HMDevice, BinarySensorEntity): """Representation of an HomeMatic low battery sensor.""" @property def device_class(self): """Return battery as a device class.""" return DEVICE_CLASS_BATTERY @property def is_on(self): """Return True if battery is low.""" return bool(self._hm_get_state()) def _init_data_struct(self): """Generate the data dictionary (self._data) from metadata.""" # Add state to data struct if self._state: self._data.update({self._state: None})
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/homematic/binary_sensor.py
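The HomeMatic binary sensor in this row resolves its device class in two steps: a MOTION state attribute short-circuits to the motion class, otherwise the device's Python class name is looked up in SENSOR_TYPES_CLASS. A standalone sketch of that resolution order, with plain strings standing in for the device-class constants:

SENSOR_TYPES_CLASS = {"ShutterContact": "opening", "RemoteMotion": None}

def resolve_device_class(type_name, state_attr):
    # Mirrors HMBinarySensor.device_class: a MOTION state attribute
    # wins over the per-type lookup table.
    if state_attr == "MOTION":
        return "motion"
    return SENSOR_TYPES_CLASS.get(type_name)

assert resolve_device_class("ShutterContact", "STATE") == "opening"
assert resolve_device_class("RemoteMotion", "MOTION") == "motion"
assert resolve_device_class("UnknownType", "STATE") is None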
"""Support for an Intergas heater via an InComfort/InTouch Lan2RF gateway.""" from typing import Any, Dict, Optional from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( DEVICE_CLASS_PRESSURE, DEVICE_CLASS_TEMPERATURE, PRESSURE_BAR, TEMP_CELSIUS, ) from homeassistant.util import slugify from . import DOMAIN, IncomfortChild INCOMFORT_HEATER_TEMP = "CV Temp" INCOMFORT_PRESSURE = "CV Pressure" INCOMFORT_TAP_TEMP = "Tap Temp" INCOMFORT_MAP_ATTRS = { INCOMFORT_HEATER_TEMP: ["heater_temp", "is_pumping"], INCOMFORT_PRESSURE: ["pressure", None], INCOMFORT_TAP_TEMP: ["tap_temp", "is_tapping"], } async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up an InComfort/InTouch sensor device.""" if discovery_info is None: return client = hass.data[DOMAIN]["client"] heaters = hass.data[DOMAIN]["heaters"] async_add_entities( [IncomfortPressure(client, h, INCOMFORT_PRESSURE) for h in heaters] + [IncomfortTemperature(client, h, INCOMFORT_HEATER_TEMP) for h in heaters] + [IncomfortTemperature(client, h, INCOMFORT_TAP_TEMP) for h in heaters] ) class IncomfortSensor(IncomfortChild): """Representation of an InComfort/InTouch sensor device.""" def __init__(self, client, heater, name) -> None: """Initialize the sensor.""" super().__init__() self._client = client self._heater = heater self._unique_id = f"{heater.serial_no}_{slugify(name)}" self.entity_id = f"{SENSOR_DOMAIN}.{DOMAIN}_{slugify(name)}" self._name = f"Boiler {name}" self._device_class = None self._state_attr = INCOMFORT_MAP_ATTRS[name][0] self._unit_of_measurement = None @property def state(self) -> Optional[str]: """Return the state of the sensor.""" return self._heater.status[self._state_attr] @property def device_class(self) -> Optional[str]: """Return the device class of the sensor.""" return self._device_class @property def unit_of_measurement(self) -> Optional[str]: """Return the unit of measurement of the sensor.""" return self._unit_of_measurement class IncomfortPressure(IncomfortSensor): """Representation of an InTouch CV Pressure sensor.""" def __init__(self, client, heater, name) -> None: """Initialize the sensor.""" super().__init__(client, heater, name) self._device_class = DEVICE_CLASS_PRESSURE self._unit_of_measurement = PRESSURE_BAR class IncomfortTemperature(IncomfortSensor): """Representation of an InTouch Temperature sensor.""" def __init__(self, client, heater, name) -> None: """Initialize the signal strength sensor.""" super().__init__(client, heater, name) self._attr = INCOMFORT_MAP_ATTRS[name][1] self._device_class = DEVICE_CLASS_TEMPERATURE self._unit_of_measurement = TEMP_CELSIUS @property def device_state_attributes(self) -> Optional[Dict[str, Any]]: """Return the device state attributes.""" return {self._attr: self._heater.status[self._attr]}
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
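# A simplified, standalone model of the include/exclude semantics the three
# filter tests above exercise. This is NOT homeassistant.helpers.entityfilter;
# it only reproduces the behaviour for the exact combinations used in these
# tests (allowlist only, denylist only, and include_entities layered on a
# denylist).
from fnmatch import fnmatch


def entity_passes(
    entity_id,
    include_domains=(),
    include_entity_globs=(),
    include_entities=(),
    exclude_domains=(),
    exclude_entity_globs=(),
    exclude_entities=(),
):
    domain = entity_id.split(".")[0]
    included = (
        entity_id in include_entities
        or domain in include_domains
        or any(fnmatch(entity_id, glob) for glob in include_entity_globs)
    )
    excluded = (
        entity_id in exclude_entities
        or domain in exclude_domains
        or any(fnmatch(entity_id, glob) for glob in exclude_entity_globs)
    )
    have_include = bool(include_domains or include_entity_globs or include_entities)
    have_exclude = bool(exclude_domains or exclude_entity_globs or exclude_entities)
    if have_include and not have_exclude:
        return included  # allowlist only
    if have_exclude and not have_include:
        return not excluded  # denylist only
    if have_include and have_exclude:
        # As in test_filtered_denylist: explicit include_entities override
        # the denylist; everything else passes unless excluded.
        return entity_id in include_entities or not excluded
    return True


# Mirrors the expectations of test_filtered_denylist above.
kwargs = dict(
    include_entities=["fake.included", "test.excluded_test"],
    exclude_domains=["fake"],
    exclude_entity_globs=["*.excluded_*"],
    exclude_entities=["not_real.excluded"],
)
assert entity_passes("test.excluded_test", **kwargs)
assert not entity_passes("alt_fake.excluded_test", **kwargs)
assert entity_passes("not_real.included", **kwargs)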
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/incomfort/sensor.py
"""Support for Satel Integra devices.""" import collections import logging from satel_integra.satel_integra import AsyncSatel import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send DEFAULT_ALARM_NAME = "satel_integra" DEFAULT_PORT = 7094 DEFAULT_CONF_ARM_HOME_MODE = 1 DEFAULT_DEVICE_PARTITION = 1 DEFAULT_ZONE_TYPE = "motion" _LOGGER = logging.getLogger(__name__) DOMAIN = "satel_integra" DATA_SATEL = "satel_integra" CONF_DEVICE_CODE = "code" CONF_DEVICE_PARTITIONS = "partitions" CONF_ARM_HOME_MODE = "arm_home_mode" CONF_ZONE_NAME = "name" CONF_ZONE_TYPE = "type" CONF_ZONES = "zones" CONF_OUTPUTS = "outputs" CONF_SWITCHABLE_OUTPUTS = "switchable_outputs" ZONES = "zones" SIGNAL_PANEL_MESSAGE = "satel_integra.panel_message" SIGNAL_PANEL_ARM_AWAY = "satel_integra.panel_arm_away" SIGNAL_PANEL_ARM_HOME = "satel_integra.panel_arm_home" SIGNAL_PANEL_DISARM = "satel_integra.panel_disarm" SIGNAL_ZONES_UPDATED = "satel_integra.zones_updated" SIGNAL_OUTPUTS_UPDATED = "satel_integra.outputs_updated" ZONE_SCHEMA = vol.Schema( { vol.Required(CONF_ZONE_NAME): cv.string, vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): cv.string, } ) EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_NAME): cv.string}) PARTITION_SCHEMA = vol.Schema( { vol.Required(CONF_ZONE_NAME): cv.string, vol.Optional(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In( [1, 2, 3] ), } ) def is_alarm_code_necessary(value): """Check if alarm code must be configured.""" if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_DEVICE_CODE not in value: raise vol.Invalid("You need to specify alarm code to use switchable_outputs") return value CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_DEVICE_CODE): cv.string, vol.Optional(CONF_DEVICE_PARTITIONS, default={}): { vol.Coerce(int): PARTITION_SCHEMA }, vol.Optional(CONF_ZONES, default={}): {vol.Coerce(int): ZONE_SCHEMA}, vol.Optional(CONF_OUTPUTS, default={}): {vol.Coerce(int): ZONE_SCHEMA}, vol.Optional(CONF_SWITCHABLE_OUTPUTS, default={}): { vol.Coerce(int): EDITABLE_OUTPUT_SCHEMA }, }, is_alarm_code_necessary, ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Satel Integra component.""" conf = config.get(DOMAIN) zones = conf.get(CONF_ZONES) outputs = conf.get(CONF_OUTPUTS) switchable_outputs = conf.get(CONF_SWITCHABLE_OUTPUTS) host = conf.get(CONF_HOST) port = conf.get(CONF_PORT) partitions = conf.get(CONF_DEVICE_PARTITIONS) monitored_outputs = collections.OrderedDict( list(outputs.items()) + list(switchable_outputs.items()) ) controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions) hass.data[DATA_SATEL] = controller result = await controller.connect() if not result: return False async def _close(): controller.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close()) _LOGGER.debug("Arm home config: %s, mode: %s ", conf, conf.get(CONF_ARM_HOME_MODE)) hass.async_create_task( async_load_platform(hass, "alarm_control_panel", DOMAIN, conf, config) ) hass.async_create_task( async_load_platform( hass, "binary_sensor", DOMAIN, {CONF_ZONES: zones, CONF_OUTPUTS: outputs}, config, ) ) hass.async_create_task( async_load_platform( 
hass, "switch", DOMAIN, { CONF_SWITCHABLE_OUTPUTS: switchable_outputs, CONF_DEVICE_CODE: conf.get(CONF_DEVICE_CODE), }, config, ) ) @callback def alarm_status_update_callback(): """Send status update received from alarm to Home Assistant.""" _LOGGER.debug("Sending request to update panel state") async_dispatcher_send(hass, SIGNAL_PANEL_MESSAGE) @callback def zones_update_callback(status): """Update zone objects as per notification from the alarm.""" _LOGGER.debug("Zones callback, status: %s", status) async_dispatcher_send(hass, SIGNAL_ZONES_UPDATED, status[ZONES]) @callback def outputs_update_callback(status): """Update zone objects as per notification from the alarm.""" _LOGGER.debug("Outputs updated callback , status: %s", status) async_dispatcher_send(hass, SIGNAL_OUTPUTS_UPDATED, status["outputs"]) # Create a task instead of adding a tracking job, since this task will # run until the connection to satel_integra is closed. hass.loop.create_task(controller.keep_alive()) hass.loop.create_task( controller.monitor_status( alarm_status_update_callback, zones_update_callback, outputs_update_callback ) ) return True
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/satel_integra/__init__.py
"""Support for controlling projector via the PJLink protocol.""" import logging from pypjlink import MUTE_AUDIO, Projector from pypjlink.projector import ProjectorError import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, ) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, STATE_OFF, STATE_ON, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_ENCODING = "encoding" DEFAULT_PORT = 4352 DEFAULT_ENCODING = "utf-8" DEFAULT_TIMEOUT = 10 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string, vol.Optional(CONF_PASSWORD): cv.string, } ) SUPPORT_PJLINK = ( SUPPORT_VOLUME_MUTE | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the PJLink platform.""" host = config.get(CONF_HOST) port = config.get(CONF_PORT) name = config.get(CONF_NAME) encoding = config.get(CONF_ENCODING) password = config.get(CONF_PASSWORD) if "pjlink" not in hass.data: hass.data["pjlink"] = {} hass_data = hass.data["pjlink"] device_label = f"{host}:{port}" if device_label in hass_data: return device = PjLinkDevice(host, port, name, encoding, password) hass_data[device_label] = device add_entities([device], True) def format_input_source(input_source_name, input_source_number): """Format input source for display in UI.""" return f"{input_source_name} {input_source_number}" class PjLinkDevice(MediaPlayerEntity): """Representation of a PJLink device.""" def __init__(self, host, port, name, encoding, password): """Iinitialize the PJLink device.""" self._host = host self._port = port self._name = name self._password = password self._encoding = encoding self._muted = False self._pwstate = STATE_OFF self._current_source = None with self.projector() as projector: if not self._name: self._name = projector.get_name() inputs = projector.get_inputs() self._source_name_mapping = {format_input_source(*x): x for x in inputs} self._source_list = sorted(self._source_name_mapping.keys()) def projector(self): """Create PJLink Projector instance.""" projector = Projector.from_address( self._host, self._port, self._encoding, DEFAULT_TIMEOUT ) projector.authenticate(self._password) return projector def update(self): """Get the latest state from the device.""" with self.projector() as projector: try: pwstate = projector.get_power() if pwstate in ("on", "warm-up"): self._pwstate = STATE_ON self._muted = projector.get_mute()[1] self._current_source = format_input_source(*projector.get_input()) else: self._pwstate = STATE_OFF self._muted = False self._current_source = None except KeyError as err: if str(err) == "'OK'": self._pwstate = STATE_OFF self._muted = False self._current_source = None else: raise except ProjectorError as err: if str(err) == "unavailable time": self._pwstate = STATE_OFF self._muted = False self._current_source = None else: raise @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the state of the device.""" return self._pwstate @property def is_volume_muted(self): """Return boolean indicating mute status.""" return self._muted 
@property def source(self): """Return current input source.""" return self._current_source @property def source_list(self): """Return all available input sources.""" return self._source_list @property def supported_features(self): """Return projector supported features.""" return SUPPORT_PJLINK def turn_off(self): """Turn projector off.""" if self._pwstate == STATE_ON: with self.projector() as projector: projector.set_power("off") def turn_on(self): """Turn projector on.""" if self._pwstate == STATE_OFF: with self.projector() as projector: projector.set_power("on") def mute_volume(self, mute): """Mute (true) of unmute (false) media player.""" with self.projector() as projector: projector.set_mute(MUTE_AUDIO, mute) def select_source(self, source): """Set the input source.""" source = self._source_name_mapping[source] with self.projector() as projector: projector.set_input(*source)
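# A small standalone sketch of the source-name round trip used above:
# pypjlink's get_inputs() yields (name, number) pairs, which are formatted
# for the UI and mapped back when select_source() is called. The input
# tuples below are made-up examples.
def format_input_source(input_source_name, input_source_number):
    return f"{input_source_name} {input_source_number}"


inputs = [("HDMI", 1), ("HDMI", 2), ("VGA", 1)]
source_name_mapping = {format_input_source(*x): x for x in inputs}
source_list = sorted(source_name_mapping)

assert source_name_mapping["HDMI 2"] == ("HDMI", 2)
print(source_list)  # ['HDMI 1', 'HDMI 2', 'VGA 1']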
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/pjlink/media_player.py
"""Support for ADS covers.""" import logging import voluptuous as vol from homeassistant.components.cover import ( ATTR_POSITION, DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, SUPPORT_STOP, CoverEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME import homeassistant.helpers.config_validation as cv from . import ( CONF_ADS_VAR, CONF_ADS_VAR_POSITION, DATA_ADS, STATE_KEY_POSITION, STATE_KEY_STATE, AdsEntity, ) _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "ADS Cover" CONF_ADS_VAR_SET_POS = "adsvar_set_position" CONF_ADS_VAR_OPEN = "adsvar_open" CONF_ADS_VAR_CLOSE = "adsvar_close" CONF_ADS_VAR_STOP = "adsvar_stop" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_ADS_VAR): cv.string, vol.Optional(CONF_ADS_VAR_POSITION): cv.string, vol.Optional(CONF_ADS_VAR_SET_POS): cv.string, vol.Optional(CONF_ADS_VAR_CLOSE): cv.string, vol.Optional(CONF_ADS_VAR_OPEN): cv.string, vol.Optional(CONF_ADS_VAR_STOP): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the cover platform for ADS.""" ads_hub = hass.data[DATA_ADS] ads_var_is_closed = config.get(CONF_ADS_VAR) ads_var_position = config.get(CONF_ADS_VAR_POSITION) ads_var_pos_set = config.get(CONF_ADS_VAR_SET_POS) ads_var_open = config.get(CONF_ADS_VAR_OPEN) ads_var_close = config.get(CONF_ADS_VAR_CLOSE) ads_var_stop = config.get(CONF_ADS_VAR_STOP) name = config[CONF_NAME] device_class = config.get(CONF_DEVICE_CLASS) add_entities( [ AdsCover( ads_hub, ads_var_is_closed, ads_var_position, ads_var_pos_set, ads_var_open, ads_var_close, ads_var_stop, name, device_class, ) ] ) class AdsCover(AdsEntity, CoverEntity): """Representation of ADS cover.""" def __init__( self, ads_hub, ads_var_is_closed, ads_var_position, ads_var_pos_set, ads_var_open, ads_var_close, ads_var_stop, name, device_class, ): """Initialize AdsCover entity.""" super().__init__(ads_hub, name, ads_var_is_closed) if self._ads_var is None: if ads_var_position is not None: self._unique_id = ads_var_position elif ads_var_pos_set is not None: self._unique_id = ads_var_pos_set elif ads_var_open is not None: self._unique_id = ads_var_open self._state_dict[STATE_KEY_POSITION] = None self._ads_var_position = ads_var_position self._ads_var_pos_set = ads_var_pos_set self._ads_var_open = ads_var_open self._ads_var_close = ads_var_close self._ads_var_stop = ads_var_stop self._device_class = device_class async def async_added_to_hass(self): """Register device notification.""" if self._ads_var is not None: await self.async_initialize_device( self._ads_var, self._ads_hub.PLCTYPE_BOOL ) if self._ads_var_position is not None: await self.async_initialize_device( self._ads_var_position, self._ads_hub.PLCTYPE_BYTE, STATE_KEY_POSITION ) @property def device_class(self): """Return the class of this cover.""" return self._device_class @property def is_closed(self): """Return if the cover is closed.""" if self._ads_var is not None: return self._state_dict[STATE_KEY_STATE] if self._ads_var_position is not None: return self._state_dict[STATE_KEY_POSITION] == 0 return None @property def current_cover_position(self): """Return current position of cover.""" return self._state_dict[STATE_KEY_POSITION] @property def supported_features(self): """Flag supported features.""" supported_features = SUPPORT_OPEN | SUPPORT_CLOSE if self._ads_var_stop is not None: supported_features |= 
SUPPORT_STOP if self._ads_var_pos_set is not None: supported_features |= SUPPORT_SET_POSITION return supported_features def stop_cover(self, **kwargs): """Fire the stop action.""" if self._ads_var_stop: self._ads_hub.write_by_name( self._ads_var_stop, True, self._ads_hub.PLCTYPE_BOOL ) def set_cover_position(self, **kwargs): """Set cover position.""" position = kwargs[ATTR_POSITION] if self._ads_var_pos_set is not None: self._ads_hub.write_by_name( self._ads_var_pos_set, position, self._ads_hub.PLCTYPE_BYTE ) def open_cover(self, **kwargs): """Move the cover up.""" if self._ads_var_open is not None: self._ads_hub.write_by_name( self._ads_var_open, True, self._ads_hub.PLCTYPE_BOOL ) elif self._ads_var_pos_set is not None: self.set_cover_position(position=100) def close_cover(self, **kwargs): """Move the cover down.""" if self._ads_var_close is not None: self._ads_hub.write_by_name( self._ads_var_close, True, self._ads_hub.PLCTYPE_BOOL ) elif self._ads_var_pos_set is not None: self.set_cover_position(position=0) @property def available(self): """Return False if state has not been updated yet.""" if self._ads_var is not None or self._ads_var_position is not None: return ( self._state_dict[STATE_KEY_STATE] is not None or self._state_dict[STATE_KEY_POSITION] is not None ) return True
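# Standalone illustration of how the supported-features bitmask above is
# assembled. The flag values mirror homeassistant.components.cover at the
# time of writing (OPEN=1, CLOSE=2, SET_POSITION=4, STOP=8), but treat them
# as illustrative constants here.
SUPPORT_OPEN, SUPPORT_CLOSE, SUPPORT_SET_POSITION, SUPPORT_STOP = 1, 2, 4, 8


def cover_features(has_stop_var, has_set_pos_var):
    """Mirror AdsCover.supported_features for a given variable config."""
    flags = SUPPORT_OPEN | SUPPORT_CLOSE
    if has_stop_var:
        flags |= SUPPORT_STOP
    if has_set_pos_var:
        flags |= SUPPORT_SET_POSITION
    return flags


assert cover_features(False, False) == SUPPORT_OPEN | SUPPORT_CLOSE
assert cover_features(True, True) == 15  # all four flags set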
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/ads/cover.py
"""Support for ESPHome switches.""" import logging from typing import Optional from aioesphomeapi import SwitchInfo, SwitchState from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up ESPHome switches based on a config entry.""" await platform_async_setup_entry( hass, entry, async_add_entities, component_key="switch", info_type=SwitchInfo, entity_type=EsphomeSwitch, state_type=SwitchState, ) class EsphomeSwitch(EsphomeEntity, SwitchEntity): """A switch implementation for ESPHome.""" @property def _static_info(self) -> SwitchInfo: return super()._static_info @property def _state(self) -> Optional[SwitchState]: return super()._state @property def icon(self) -> str: """Return the icon.""" return self._static_info.icon @property def assumed_state(self) -> bool: """Return true if we do optimistic updates.""" return self._static_info.assumed_state # https://github.com/PyCQA/pylint/issues/3150 for @esphome_state_property # pylint: disable=invalid-overridden-method @esphome_state_property def is_on(self) -> Optional[bool]: """Return true if the switch is on.""" return self._state.state async def async_turn_on(self, **kwargs) -> None: """Turn the entity on.""" await self._client.switch_command(self._static_info.key, True) async def async_turn_off(self, **kwargs) -> None: """Turn the entity off.""" await self._client.switch_command(self._static_info.key, False)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/esphome/switch.py
"""Support for IOTA wallets.""" from datetime import timedelta import logging from iota import Iota import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import load_platform from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_IRI = "iri" CONF_TESTNET = "testnet" CONF_WALLET_NAME = "name" CONF_WALLET_SEED = "seed" CONF_WALLETS = "wallets" DOMAIN = "iota" IOTA_PLATFORMS = ["sensor"] SCAN_INTERVAL = timedelta(minutes=10) WALLET_CONFIG = vol.Schema( { vol.Required(CONF_WALLET_NAME): cv.string, vol.Required(CONF_WALLET_SEED): cv.string, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_IRI): cv.string, vol.Optional(CONF_TESTNET, default=False): cv.boolean, vol.Required(CONF_WALLETS): vol.All(cv.ensure_list, [WALLET_CONFIG]), } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up the IOTA component.""" iota_config = config[DOMAIN] for platform in IOTA_PLATFORMS: load_platform(hass, platform, DOMAIN, iota_config, config) return True class IotaDevice(Entity): """Representation of a IOTA device.""" def __init__(self, name, seed, iri, is_testnet=False): """Initialise the IOTA device.""" self._name = name self._seed = seed self.iri = iri self.is_testnet = is_testnet @property def name(self): """Return the default name of the device.""" return self._name @property def device_state_attributes(self): """Return the state attributes of the device.""" attr = {CONF_WALLET_NAME: self._name} return attr @property def api(self): """Construct API object for interaction with the IRI node.""" return Iota(adapter=self.iri, seed=self._seed)
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/iota/__init__.py
"""Provides device automations for Fan.""" from typing import List import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA from homeassistant.components.homeassistant.triggers import state as state_trigger from homeassistant.const import ( CONF_DEVICE_ID, CONF_DOMAIN, CONF_ENTITY_ID, CONF_PLATFORM, CONF_TYPE, STATE_OFF, STATE_ON, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry from homeassistant.helpers.typing import ConfigType from . import DOMAIN TRIGGER_TYPES = {"turned_on", "turned_off"} TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES), } ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers for Fan devices.""" registry = await entity_registry.async_get_registry(hass) triggers = [] # Get all the integrations entities for this device for entry in entity_registry.async_entries_for_device(registry, device_id): if entry.domain != DOMAIN: continue # Add triggers for each entity that belongs to this integration triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "turned_on", } ) triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "turned_off", } ) return triggers async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Attach a trigger.""" config = TRIGGER_SCHEMA(config) if config[CONF_TYPE] == "turned_on": from_state = STATE_OFF to_state = STATE_ON else: from_state = STATE_ON to_state = STATE_OFF state_config = { state_trigger.CONF_PLATFORM: "state", CONF_ENTITY_ID: config[CONF_ENTITY_ID], state_trigger.CONF_FROM: from_state, state_trigger.CONF_TO: to_state, } state_config = state_trigger.TRIGGER_SCHEMA(state_config) return await state_trigger.async_attach_trigger( hass, state_config, action, automation_info, platform_type="device" )
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/components/fan/device_trigger.py
"""JSON utility functions.""" from collections import deque import json import logging import os import tempfile from typing import Any, Callable, Dict, List, Optional, Type, Union from homeassistant.core import Event, State from homeassistant.exceptions import HomeAssistantError _LOGGER = logging.getLogger(__name__) class SerializationError(HomeAssistantError): """Error serializing the data to JSON.""" class WriteError(HomeAssistantError): """Error writing the data.""" def load_json( filename: str, default: Union[List, Dict, None] = None ) -> Union[List, Dict]: """Load JSON data from a file and return as dict or list. Defaults to returning empty dict if file is not found. """ try: with open(filename, encoding="utf-8") as fdesc: return json.loads(fdesc.read()) # type: ignore except FileNotFoundError: # This is not a fatal error _LOGGER.debug("JSON file not found: %s", filename) except ValueError as error: _LOGGER.exception("Could not parse JSON content: %s", filename) raise HomeAssistantError(error) from error except OSError as error: _LOGGER.exception("JSON file reading failed: %s", filename) raise HomeAssistantError(error) from error return {} if default is None else default def save_json( filename: str, data: Union[List, Dict], private: bool = False, *, encoder: Optional[Type[json.JSONEncoder]] = None, ) -> None: """Save JSON data to a file. Returns True on success. """ try: json_data = json.dumps(data, indent=4, cls=encoder) except TypeError as error: msg = f"Failed to serialize to JSON: {filename}. Bad data at {format_unserializable_data(find_paths_unserializable_data(data))}" _LOGGER.error(msg) raise SerializationError(msg) from error tmp_filename = "" tmp_path = os.path.split(filename)[0] try: # Modern versions of Python tempfile create this file with mode 0o600 with tempfile.NamedTemporaryFile( mode="w", encoding="utf-8", dir=tmp_path, delete=False ) as fdesc: fdesc.write(json_data) tmp_filename = fdesc.name if not private: os.chmod(tmp_filename, 0o644) os.replace(tmp_filename, filename) except OSError as error: _LOGGER.exception("Saving JSON file failed: %s", filename) raise WriteError(error) from error finally: if os.path.exists(tmp_filename): try: os.remove(tmp_filename) except OSError as err: # If we are cleaning up then something else went wrong, so # we should suppress likely follow-on errors in the cleanup _LOGGER.error("JSON replacement cleanup failed: %s", err) def format_unserializable_data(data: Dict[str, Any]) -> str: """Format output of find_paths in a friendly way. Format is comma separated: <path>=<value>(<type>) """ return ", ".join(f"{path}={value}({type(value)}" for path, value in data.items()) def find_paths_unserializable_data( bad_data: Any, *, dump: Callable[[Any], str] = json.dumps ) -> Dict[str, Any]: """Find the paths to unserializable data. This method is slow! Only use for error handling. """ to_process = deque([(bad_data, "$")]) invalid = {} while to_process: obj, obj_path = to_process.popleft() try: dump(obj) continue except (ValueError, TypeError): pass # We convert states and events to dict so we can find bad data inside it if isinstance(obj, State): obj_path += f"(state: {obj.entity_id})" obj = obj.as_dict() elif isinstance(obj, Event): obj_path += f"(event: {obj.event_type})" obj = obj.as_dict() if isinstance(obj, dict): for key, value in obj.items(): try: # Is key valid? 
dump({key: None}) except TypeError: invalid[f"{obj_path}<key: {key}>"] = key else: # Process value to_process.append((value, f"{obj_path}.{key}")) elif isinstance(obj, list): for idx, value in enumerate(obj): to_process.append((value, f"{obj_path}[{idx}]")) else: invalid[obj_path] = obj return invalid
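A short usage sketch for the helpers above; the file path is a hypothetical temporary location:

import os
import tempfile

path = os.path.join(tempfile.gettempdir(), "example.json")
save_json(path, {"counter": 1})  # atomic write via NamedTemporaryFile + os.replace
assert load_json(path) == {"counter": 1}

# Locating unserializable values before SerializationError is raised:
bad = {"when": object()}
print(format_unserializable_data(find_paths_unserializable_data(bad)))
# e.g. $.when=<object object at 0x...>(<class 'object'>)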
"""The tests for the Prometheus exporter.""" from dataclasses import dataclass import datetime import pytest from homeassistant.components import climate, humidifier, sensor from homeassistant.components.demo.sensor import DemoSensor import homeassistant.components.prometheus as prometheus from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, EVENT_STATE_CHANGED, ) from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util import tests.async_mock as mock PROMETHEUS_PATH = "homeassistant.components.prometheus" @dataclass class FilterTest: """Class for capturing a filter test.""" id: str should_pass: bool async def prometheus_client(hass, hass_client): """Initialize an hass_client with Prometheus component.""" await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}}) await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]}) await async_setup_component( hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]} ) await hass.async_block_till_done() await async_setup_component( hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]} ) sensor1 = DemoSensor( None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None ) sensor1.hass = hass sensor1.entity_id = "sensor.television_energy" await sensor1.async_update_ha_state() sensor2 = DemoSensor( None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None ) sensor2.hass = hass sensor2.entity_id = "sensor.radio_energy" with mock.patch( "homeassistant.util.dt.utcnow", return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC), ): await sensor2.async_update_ha_state() sensor3 = DemoSensor( None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None ) sensor3.hass = hass sensor3.entity_id = "sensor.electricity_price" await sensor3.async_update_ha_state() sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None) sensor4.hass = hass sensor4.entity_id = "sensor.wind_direction" await sensor4.async_update_ha_state() sensor5 = DemoSensor( None, "SPS30 PM <1µm Weight concentration", 3.7069, None, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, None, ) sensor5.hass = hass sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration" await sensor5.async_update_ha_state() return await hass_client() async def test_view(hass, hass_client): """Test prometheus metrics view.""" client = await prometheus_client(hass, hass_client) resp = await client.get(prometheus.API_ENDPOINT) assert resp.status == 200 assert resp.headers["content-type"] == "text/plain" body = await resp.text() body = body.split("\n") assert len(body) > 3 assert "# HELP python_info Python platform information" in body assert ( "# HELP python_gc_objects_collected_total " "Objects collected during gc" in body ) assert ( 'temperature_c{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 15.6' in body ) assert ( 'battery_level_percent{domain="sensor",' 'entity="sensor.outside_temperature",' 'friendly_name="Outside Temperature"} 12.0' in body ) assert ( 'current_temperature_c{domain="climate",' 'entity="climate.heatpump",' 'friendly_name="HeatPump"} 25.0' in body ) assert ( 'humidifier_target_humidity_percent{domain="humidifier",' 'entity="humidifier.humidifier",' 'friendly_name="Humidifier"} 68.0' in body ) assert ( 'humidifier_state{domain="humidifier",' 'entity="humidifier.dehumidifier",' 
'friendly_name="Dehumidifier"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="home"} 1.0' in body ) assert ( 'humidifier_mode{domain="humidifier",' 'entity="humidifier.hygrostat",' 'friendly_name="Hygrostat",' 'mode="eco"} 0.0' in body ) assert ( 'humidity_percent{domain="sensor",' 'entity="sensor.outside_humidity",' 'friendly_name="Outside Humidity"} 54.0' in body ) assert ( 'sensor_unit_kwh{domain="sensor",' 'entity="sensor.television_energy",' 'friendly_name="Television Energy"} 74.0' in body ) assert ( 'power_kwh{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 14.0' in body ) assert ( 'entity_available{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 1.0' in body ) assert ( 'last_updated_time_seconds{domain="sensor",' 'entity="sensor.radio_energy",' 'friendly_name="Radio Energy"} 86400.0' in body ) assert ( 'sensor_unit_sek_per_kwh{domain="sensor",' 'entity="sensor.electricity_price",' 'friendly_name="Electricity price"} 0.123' in body ) assert ( 'sensor_unit_u0xb0{domain="sensor",' 'entity="sensor.wind_direction",' 'friendly_name="Wind Direction"} 25.0' in body ) assert ( 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the prometheus client.""" with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: counter_client = mock.MagicMock() client.Counter = mock.MagicMock(return_value=counter_client) setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) yield counter_client @pytest.fixture def mock_bus(hass): """Mock the event bus listener.""" hass.bus.listen = mock.MagicMock() @pytest.mark.usefixtures("mock_bus") async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {prometheus.DOMAIN: {}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] @pytest.mark.usefixtures("mock_bus") async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { prometheus.DOMAIN: { "namespace": "ns", "default_metric": "m", "override_metric": "m", "component_config": {"fake.test": {"override_metric": "km"}}, "component_config_glob": {"fake.time_*": {"override_metric": "h"}}, "component_config_domain": {"climate": {"override_metric": "°C"}}, "filter": { "include_domains": ["climate"], "include_entity_globs": ["fake.time_*"], "include_entities": ["fake.test"], "exclude_domains": ["script"], "exclude_entity_globs": ["climate.excluded_*"], "exclude_entities": ["fake.time_excluded"], }, } } assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() assert hass.bus.listen.called assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] def make_event(entity_id): """Make a mock event for test.""" domain = split_entity_id(entity_id)[0] state = mock.MagicMock( state="not blank", domain=domain, entity_id=entity_id, object_id="entity", attributes={}, ) return mock.MagicMock(data={"new_state": state}, time_fired=12345) async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {prometheus.DOMAIN: {"filter": 
filter_config}} assert await async_setup_component(hass, prometheus.DOMAIN, config) await hass.async_block_till_done() return hass.bus.listen.call_args_list[0][0][1] @pytest.mark.usefixtures("mock_bus") async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" handler_method = await _setup( hass, { "include_domains": ["fake"], "include_entity_globs": ["test.included_*"], "include_entities": ["not_real.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("fake.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_denylist(hass, mock_client): """Test a denylist only config.""" handler_method = await _setup( hass, { "exclude_domains": ["fake"], "exclude_entity_globs": ["test.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("light.included", True), FilterTest("test.excluded_test", False), FilterTest("test.included_test", True), FilterTest("not_real.included", True), FilterTest("not_real.excluded", False), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock() @pytest.mark.usefixtures("mock_bus") async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" handler_method = await _setup( hass, { "include_entities": ["fake.included", "test.excluded_test"], "exclude_domains": ["fake"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["not_real.excluded"], }, ) tests = [ FilterTest("fake.excluded", False), FilterTest("fake.included", True), FilterTest("alt_fake.excluded_test", False), FilterTest("test.excluded_test", True), FilterTest("not_real.excluded", False), FilterTest("not_real.included", True), ] for test in tests: event = make_event(test.id) handler_method(event) was_called = mock_client.labels.call_count == 1 assert test.should_pass == was_called mock_client.labels.reset_mock()
tchellomello/home-assistant
tests/components/prometheus/test_init.py
homeassistant/util/json.py
"""Describe logbook events.""" from homeassistant.components.logbook import LazyEventPartialState from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME from homeassistant.core import HomeAssistant, callback from . import ATTR_SOURCE, DOMAIN, EVENT_AUTOMATION_TRIGGERED @callback def async_describe_events(hass: HomeAssistant, async_describe_event): # type: ignore """Describe logbook events.""" @callback def async_describe_logbook_event(event: LazyEventPartialState): # type: ignore """Describe a logbook event.""" data = event.data message = "has been triggered" if ATTR_SOURCE in data: message = f"{message} by {data[ATTR_SOURCE]}" return { "name": data.get(ATTR_NAME), "message": message, "source": data.get(ATTR_SOURCE), "entity_id": data.get(ATTR_ENTITY_ID), "context_id": event.context_id, } async_describe_event( DOMAIN, EVENT_AUTOMATION_TRIGGERED, async_describe_logbook_event )
"""The tests for Lock device conditions.""" import pytest import homeassistant.components.automation as automation from homeassistant.components.lock import DOMAIN from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.helpers import device_registry from homeassistant.setup import async_setup_component from tests.common import ( MockConfigEntry, assert_lists_same, async_get_device_automations, async_mock_service, mock_device_registry, mock_registry, ) from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401 @pytest.fixture def device_reg(hass): """Return an empty, loaded, registry.""" return mock_device_registry(hass) @pytest.fixture def entity_reg(hass): """Return an empty, loaded, registry.""" return mock_registry(hass) @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_get_conditions(hass, device_reg, entity_reg): """Test we get the expected conditions from a lock.""" config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) device_entry = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id) expected_conditions = [ { "condition": "device", "domain": DOMAIN, "type": "is_locked", "device_id": device_entry.id, "entity_id": f"{DOMAIN}.test_5678", }, { "condition": "device", "domain": DOMAIN, "type": "is_unlocked", "device_id": device_entry.id, "entity_id": f"{DOMAIN}.test_5678", }, ] conditions = await async_get_device_automations(hass, "condition", device_entry.id) assert_lists_same(conditions, expected_conditions) async def test_if_state(hass, calls): """Test for turn_on and turn_off conditions.""" hass.states.async_set("lock.entity", STATE_LOCKED) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event1"}, "condition": [ { "condition": "device", "domain": DOMAIN, "device_id": "", "entity_id": "lock.entity", "type": "is_locked", } ], "action": { "service": "test.automation", "data_template": { "some": "is_locked - {{ trigger.platform }} - {{ trigger.event.event_type }}" }, }, }, { "trigger": {"platform": "event", "event_type": "test_event2"}, "condition": [ { "condition": "device", "domain": DOMAIN, "device_id": "", "entity_id": "lock.entity", "type": "is_unlocked", } ], "action": { "service": "test.automation", "data_template": { "some": "is_unlocked - {{ trigger.platform }} - {{ trigger.event.event_type }}" }, }, }, ] }, ) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "is_locked - event - test_event1" hass.states.async_set("lock.entity", STATE_UNLOCKED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["some"] == "is_unlocked - event - test_event2"
adrienbrault/home-assistant
tests/components/lock/test_device_condition.py
homeassistant/components/automation/logbook.py
"""Helpers for listening to events.""" from __future__ import annotations import asyncio import copy from dataclasses import dataclass from datetime import datetime, timedelta import functools as ft import logging import time from typing import Any, Awaitable, Callable, Iterable, List import attr from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NOW, EVENT_CORE_CONFIG_UPDATE, EVENT_STATE_CHANGED, EVENT_TIME_CHANGED, MATCH_ALL, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET, ) from homeassistant.core import ( CALLBACK_TYPE, Event, HassJob, HomeAssistant, State, callback, split_entity_id, ) from homeassistant.exceptions import TemplateError from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED from homeassistant.helpers.ratelimit import KeyedRateLimit from homeassistant.helpers.sun import get_astral_event_next from homeassistant.helpers.template import RenderInfo, Template, result_as_boolean from homeassistant.helpers.typing import TemplateVarsType from homeassistant.loader import bind_hass from homeassistant.util import dt as dt_util from homeassistant.util.async_ import run_callback_threadsafe TRACK_STATE_CHANGE_CALLBACKS = "track_state_change_callbacks" TRACK_STATE_CHANGE_LISTENER = "track_state_change_listener" TRACK_STATE_ADDED_DOMAIN_CALLBACKS = "track_state_added_domain_callbacks" TRACK_STATE_ADDED_DOMAIN_LISTENER = "track_state_added_domain_listener" TRACK_STATE_REMOVED_DOMAIN_CALLBACKS = "track_state_removed_domain_callbacks" TRACK_STATE_REMOVED_DOMAIN_LISTENER = "track_state_removed_domain_listener" TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS = "track_entity_registry_updated_callbacks" TRACK_ENTITY_REGISTRY_UPDATED_LISTENER = "track_entity_registry_updated_listener" _ALL_LISTENER = "all" _DOMAINS_LISTENER = "domains" _ENTITIES_LISTENER = "entities" _LOGGER = logging.getLogger(__name__) @dataclass class TrackStates: """Class for keeping track of states being tracked. all_states: All states on the system are being tracked entities: Entities to track domains: Domains to track """ all_states: bool entities: set domains: set @dataclass class TrackTemplate: """Class for keeping track of a template with variables. The template is template to calculate. The variables are variables to pass to the template. The rate_limit is a rate limit on how often the template is re-rendered. """ template: Template variables: TemplateVarsType rate_limit: timedelta | None = None @dataclass class TrackTemplateResult: """Class for result of template tracking. template The template that has changed. last_result The output from the template on the last successful run, or None if no previous successful run. result Result from the template run. This will be a string or an TemplateError if the template resulted in an error. 
""" template: Template last_result: Any result: Any def threaded_listener_factory( async_factory: Callable[..., Any] ) -> Callable[..., CALLBACK_TYPE]: """Convert an async event helper to a threaded one.""" @ft.wraps(async_factory) def factory(*args: Any, **kwargs: Any) -> CALLBACK_TYPE: """Call async event helper safely.""" hass = args[0] if not isinstance(hass, HomeAssistant): raise TypeError("First parameter needs to be a hass instance") async_remove = run_callback_threadsafe( hass.loop, ft.partial(async_factory, *args, **kwargs) ).result() def remove() -> None: """Threadsafe removal.""" run_callback_threadsafe(hass.loop, async_remove).result() return remove return factory @callback @bind_hass def async_track_state_change( hass: HomeAssistant, entity_ids: str | Iterable[str], action: Callable[[str, State, State], None], from_state: None | str | Iterable[str] = None, to_state: None | str | Iterable[str] = None, ) -> CALLBACK_TYPE: """Track specific state changes. entity_ids, from_state and to_state can be string or list. Use list to match multiple. Returns a function that can be called to remove the listener. If entity_ids are not MATCH_ALL along with from_state and to_state being None, async_track_state_change_event should be used instead as it is slightly faster. Must be run within the event loop. """ if from_state is not None: match_from_state = process_state_match(from_state) if to_state is not None: match_to_state = process_state_match(to_state) # Ensure it is a lowercase list with entity ids we want to match on if entity_ids == MATCH_ALL: pass elif isinstance(entity_ids, str): entity_ids = (entity_ids.lower(),) else: entity_ids = tuple(entity_id.lower() for entity_id in entity_ids) job = HassJob(action) @callback def state_change_filter(event: Event) -> bool: """Handle specific state changes.""" if from_state is not None: old_state = event.data.get("old_state") if old_state is not None: old_state = old_state.state if not match_from_state(old_state): return False if to_state is not None: new_state = event.data.get("new_state") if new_state is not None: new_state = new_state.state if not match_to_state(new_state): return False return True @callback def state_change_dispatcher(event: Event) -> None: """Handle specific state changes.""" hass.async_run_hass_job( job, event.data.get("entity_id"), event.data.get("old_state"), event.data.get("new_state"), ) @callback def state_change_listener(event: Event) -> None: """Handle specific state changes.""" if not state_change_filter(event): return state_change_dispatcher(event) if entity_ids != MATCH_ALL: # If we have a list of entity ids we use # async_track_state_change_event to route # by entity_id to avoid iterating though state change # events and creating a jobs where the most # common outcome is to return right away because # the entity_id does not match since usually # only one or two listeners want that specific # entity_id. return async_track_state_change_event(hass, entity_ids, state_change_listener) return hass.bus.async_listen( EVENT_STATE_CHANGED, state_change_dispatcher, event_filter=state_change_filter ) track_state_change = threaded_listener_factory(async_track_state_change) @bind_hass def async_track_state_change_event( hass: HomeAssistant, entity_ids: str | Iterable[str], action: Callable[[Event], Any], ) -> Callable[[], None]: """Track specific state change events indexed by entity_id. Unlike async_track_state_change, async_track_state_change_event passes the full event to the callback. 
In order to avoid having to iterate a long list of EVENT_STATE_CHANGED and fire and create a job for each one, we keep a dict of entity ids that care about the state change events so we can do a fast dict lookup to route events. """ entity_ids = _async_string_to_lower_list(entity_ids) if not entity_ids: return _remove_empty_listener entity_callbacks = hass.data.setdefault(TRACK_STATE_CHANGE_CALLBACKS, {}) if TRACK_STATE_CHANGE_LISTENER not in hass.data: @callback def _async_state_change_filter(event: Event) -> bool: """Filter state changes by entity_id.""" return event.data.get("entity_id") in entity_callbacks @callback def _async_state_change_dispatcher(event: Event) -> None: """Dispatch state changes by entity_id.""" entity_id = event.data.get("entity_id") if entity_id not in entity_callbacks: return for job in entity_callbacks[entity_id][:]: try: hass.async_run_hass_job(job, event) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error while processing state change for %s", entity_id ) hass.data[TRACK_STATE_CHANGE_LISTENER] = hass.bus.async_listen( EVENT_STATE_CHANGED, _async_state_change_dispatcher, event_filter=_async_state_change_filter, ) job = HassJob(action) for entity_id in entity_ids: entity_callbacks.setdefault(entity_id, []).append(job) @callback def remove_listener() -> None: """Remove state change listener.""" _async_remove_indexed_listeners( hass, TRACK_STATE_CHANGE_CALLBACKS, TRACK_STATE_CHANGE_LISTENER, entity_ids, job, ) return remove_listener @callback def _remove_empty_listener() -> None: """Remove a listener that does nothing.""" @callback def _async_remove_indexed_listeners( hass: HomeAssistant, data_key: str, listener_key: str, storage_keys: Iterable[str], job: HassJob, ) -> None: """Remove a listener.""" callbacks = hass.data[data_key] for storage_key in storage_keys: callbacks[storage_key].remove(job) if len(callbacks[storage_key]) == 0: del callbacks[storage_key] if not callbacks: hass.data[listener_key]() del hass.data[listener_key] @bind_hass def async_track_entity_registry_updated_event( hass: HomeAssistant, entity_ids: str | Iterable[str], action: Callable[[Event], Any], ) -> Callable[[], None]: """Track specific entity registry updated events indexed by entity_id. Similar to async_track_state_change_event. 
""" entity_ids = _async_string_to_lower_list(entity_ids) if not entity_ids: return _remove_empty_listener entity_callbacks = hass.data.setdefault(TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS, {}) if TRACK_ENTITY_REGISTRY_UPDATED_LISTENER not in hass.data: @callback def _async_entity_registry_updated_filter(event: Event) -> bool: """Filter entity registry updates by entity_id.""" entity_id = event.data.get("old_entity_id", event.data["entity_id"]) return entity_id in entity_callbacks @callback def _async_entity_registry_updated_dispatcher(event: Event) -> None: """Dispatch entity registry updates by entity_id.""" entity_id = event.data.get("old_entity_id", event.data["entity_id"]) if entity_id not in entity_callbacks: return for job in entity_callbacks[entity_id][:]: try: hass.async_run_hass_job(job, event) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error while processing entity registry update for %s", entity_id, ) hass.data[TRACK_ENTITY_REGISTRY_UPDATED_LISTENER] = hass.bus.async_listen( EVENT_ENTITY_REGISTRY_UPDATED, _async_entity_registry_updated_dispatcher, event_filter=_async_entity_registry_updated_filter, ) job = HassJob(action) for entity_id in entity_ids: entity_callbacks.setdefault(entity_id, []).append(job) @callback def remove_listener() -> None: """Remove state change listener.""" _async_remove_indexed_listeners( hass, TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS, TRACK_ENTITY_REGISTRY_UPDATED_LISTENER, entity_ids, job, ) return remove_listener @callback def _async_dispatch_domain_event( hass: HomeAssistant, event: Event, callbacks: dict[str, list] ) -> None: domain = split_entity_id(event.data["entity_id"])[0] if domain not in callbacks and MATCH_ALL not in callbacks: return listeners = callbacks.get(domain, []) + callbacks.get(MATCH_ALL, []) for job in listeners: try: hass.async_run_hass_job(job, event) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error while processing event %s for domain %s", event, domain ) @bind_hass def async_track_state_added_domain( hass: HomeAssistant, domains: str | Iterable[str], action: Callable[[Event], Any], ) -> Callable[[], None]: """Track state change events when an entity is added to domains.""" domains = _async_string_to_lower_list(domains) if not domains: return _remove_empty_listener domain_callbacks = hass.data.setdefault(TRACK_STATE_ADDED_DOMAIN_CALLBACKS, {}) if TRACK_STATE_ADDED_DOMAIN_LISTENER not in hass.data: @callback def _async_state_change_filter(event: Event) -> bool: """Filter state changes by entity_id.""" return event.data.get("old_state") is None @callback def _async_state_change_dispatcher(event: Event) -> None: """Dispatch state changes by entity_id.""" if event.data.get("old_state") is not None: return _async_dispatch_domain_event(hass, event, domain_callbacks) hass.data[TRACK_STATE_ADDED_DOMAIN_LISTENER] = hass.bus.async_listen( EVENT_STATE_CHANGED, _async_state_change_dispatcher, event_filter=_async_state_change_filter, ) job = HassJob(action) for domain in domains: domain_callbacks.setdefault(domain, []).append(job) @callback def remove_listener() -> None: """Remove state change listener.""" _async_remove_indexed_listeners( hass, TRACK_STATE_ADDED_DOMAIN_CALLBACKS, TRACK_STATE_ADDED_DOMAIN_LISTENER, domains, job, ) return remove_listener @bind_hass def async_track_state_removed_domain( hass: HomeAssistant, domains: str | Iterable[str], action: Callable[[Event], Any], ) -> Callable[[], None]: """Track state change events when an entity is removed from domains.""" domains = 
_async_string_to_lower_list(domains) if not domains: return _remove_empty_listener domain_callbacks = hass.data.setdefault(TRACK_STATE_REMOVED_DOMAIN_CALLBACKS, {}) if TRACK_STATE_REMOVED_DOMAIN_LISTENER not in hass.data: @callback def _async_state_change_filter(event: Event) -> bool: """Filter state changes by entity_id.""" return event.data.get("new_state") is None @callback def _async_state_change_dispatcher(event: Event) -> None: """Dispatch state changes by entity_id.""" if event.data.get("new_state") is not None: return _async_dispatch_domain_event(hass, event, domain_callbacks) hass.data[TRACK_STATE_REMOVED_DOMAIN_LISTENER] = hass.bus.async_listen( EVENT_STATE_CHANGED, _async_state_change_dispatcher, event_filter=_async_state_change_filter, ) job = HassJob(action) for domain in domains: domain_callbacks.setdefault(domain, []).append(job) @callback def remove_listener() -> None: """Remove state change listener.""" _async_remove_indexed_listeners( hass, TRACK_STATE_REMOVED_DOMAIN_CALLBACKS, TRACK_STATE_REMOVED_DOMAIN_LISTENER, domains, job, ) return remove_listener @callback def _async_string_to_lower_list(instr: str | Iterable[str]) -> list[str]: if isinstance(instr, str): return [instr.lower()] return [mstr.lower() for mstr in instr] class _TrackStateChangeFiltered: """Handle removal / refresh of tracker.""" def __init__( self, hass: HomeAssistant, track_states: TrackStates, action: Callable[[Event], Any], ): """Handle removal / refresh of tracker init.""" self.hass = hass self._action = action self._listeners: dict[str, Callable] = {} self._last_track_states: TrackStates = track_states @callback def async_setup(self) -> None: """Create listeners to track states.""" track_states = self._last_track_states if ( not track_states.all_states and not track_states.domains and not track_states.entities ): return if track_states.all_states: self._setup_all_listener() return self._setup_domains_listener(track_states.domains) self._setup_entities_listener(track_states.domains, track_states.entities) @property def listeners(self) -> dict: """State changes that will cause a re-render.""" track_states = self._last_track_states return { _ALL_LISTENER: track_states.all_states, _ENTITIES_LISTENER: track_states.entities, _DOMAINS_LISTENER: track_states.domains, } @callback def async_update_listeners(self, new_track_states: TrackStates) -> None: """Update the listeners based on the new TrackStates.""" last_track_states = self._last_track_states self._last_track_states = new_track_states had_all_listener = last_track_states.all_states if new_track_states.all_states: if had_all_listener: return self._cancel_listener(_DOMAINS_LISTENER) self._cancel_listener(_ENTITIES_LISTENER) self._setup_all_listener() return if had_all_listener: self._cancel_listener(_ALL_LISTENER) domains_changed = new_track_states.domains != last_track_states.domains if had_all_listener or domains_changed: domains_changed = True self._cancel_listener(_DOMAINS_LISTENER) self._setup_domains_listener(new_track_states.domains) if ( had_all_listener or domains_changed or new_track_states.entities != last_track_states.entities ): self._cancel_listener(_ENTITIES_LISTENER) self._setup_entities_listener( new_track_states.domains, new_track_states.entities ) @callback def async_remove(self) -> None: """Cancel the listeners.""" for key in list(self._listeners): self._listeners.pop(key)() @callback def _cancel_listener(self, listener_name: str) -> None: if listener_name not in self._listeners: return self._listeners.pop(listener_name)() 
@callback def _setup_entities_listener(self, domains: set, entities: set) -> None: if domains: entities = entities.copy() entities.update(self.hass.states.async_entity_ids(domains)) # Entities have changed to none if not entities: return self._listeners[_ENTITIES_LISTENER] = async_track_state_change_event( self.hass, entities, self._action ) @callback def _setup_domains_listener(self, domains: set) -> None: if not domains: return self._listeners[_DOMAINS_LISTENER] = async_track_state_added_domain( self.hass, domains, self._action ) @callback def _setup_all_listener(self) -> None: self._listeners[_ALL_LISTENER] = self.hass.bus.async_listen( EVENT_STATE_CHANGED, self._action ) @callback @bind_hass def async_track_state_change_filtered( hass: HomeAssistant, track_states: TrackStates, action: Callable[[Event], Any], ) -> _TrackStateChangeFiltered: """Track state changes with a TrackStates filter that can be updated. Parameters ---------- hass Home assistant object. track_states A TrackStates data class. action Callable to call with results. Returns ------- Object used to update the listeners (async_update_listeners) with a new TrackStates or cancel the tracking (async_remove). """ tracker = _TrackStateChangeFiltered(hass, track_states, action) tracker.async_setup() return tracker @callback @bind_hass def async_track_template( hass: HomeAssistant, template: Template, action: Callable[[str, State | None, State | None], None], variables: TemplateVarsType | None = None, ) -> Callable[[], None]: """Add a listener that fires when a template evaluates to 'true'. Listen for the result of the template becoming true, or a true-like string result, such as 'On', 'Open', or 'Yes'. If the template results in an error state when the value changes, this will be logged and not passed through. If the initial check of the template is invalid and results in an exception, the listener will still be registered but will only fire if the template result becomes true without an exception. Action arguments ---------------- entity_id ID of the entity that triggered the state change. old_state The old state of the entity that changed. new_state New state of the entity that changed. Parameters ---------- hass Home assistant object. template The template to calculate. action Callable to call with results. See above for arguments. variables Variables to pass to the template. Returns ------- Callable to unregister the listener.
""" job = HassJob(action) @callback def _template_changed_listener( event: Event, updates: list[TrackTemplateResult] ) -> None: """Check if condition is correct and run action.""" track_result = updates.pop() template = track_result.template last_result = track_result.last_result result = track_result.result if isinstance(result, TemplateError): _LOGGER.error( "Error while processing template: %s", template.template, exc_info=result, ) return if ( not isinstance(last_result, TemplateError) and result_as_boolean(last_result) or not result_as_boolean(result) ): return hass.async_run_hass_job( job, event and event.data.get("entity_id"), event and event.data.get("old_state"), event and event.data.get("new_state"), ) info = async_track_template_result( hass, [TrackTemplate(template, variables)], _template_changed_listener ) return info.async_remove track_template = threaded_listener_factory(async_track_template) class _TrackTemplateResultInfo: """Handle removal / refresh of tracker.""" def __init__( self, hass: HomeAssistant, track_templates: Iterable[TrackTemplate], action: Callable, ): """Handle removal / refresh of tracker init.""" self.hass = hass self._job = HassJob(action) for track_template_ in track_templates: track_template_.template.hass = hass self._track_templates = track_templates self._last_result: dict[Template, str | TemplateError] = {} self._rate_limit = KeyedRateLimit(hass) self._info: dict[Template, RenderInfo] = {} self._track_state_changes: _TrackStateChangeFiltered | None = None self._time_listeners: dict[Template, Callable] = {} def async_setup(self, raise_on_template_error: bool) -> None: """Activation of template tracking.""" for track_template_ in self._track_templates: template = track_template_.template variables = track_template_.variables self._info[template] = info = template.async_render_to_info(variables) if info.exception: if raise_on_template_error: raise info.exception _LOGGER.error( "Error while processing template: %s", track_template_.template, exc_info=info.exception, ) self._track_state_changes = async_track_state_change_filtered( self.hass, _render_infos_to_track_states(self._info.values()), self._refresh ) self._update_time_listeners() _LOGGER.debug( "Template group %s listens for %s", self._track_templates, self.listeners, ) @property def listeners(self) -> dict: """State changes that will cause a re-render.""" assert self._track_state_changes return { **self._track_state_changes.listeners, "time": bool(self._time_listeners), } @callback def _setup_time_listener(self, template: Template, has_time: bool) -> None: if not has_time: if template in self._time_listeners: # now() or utcnow() has left the scope of the template self._time_listeners.pop(template)() return if template in self._time_listeners: return track_templates = [ track_template_ for track_template_ in self._track_templates if track_template_.template == template ] @callback def _refresh_from_time(now: datetime) -> None: self._refresh(None, track_templates=track_templates) self._time_listeners[template] = async_track_utc_time_change( self.hass, _refresh_from_time, second=0 ) @callback def _update_time_listeners(self) -> None: for template, info in self._info.items(): self._setup_time_listener(template, info.has_time) @callback def async_remove(self) -> None: """Cancel the listener.""" assert self._track_state_changes self._track_state_changes.async_remove() self._rate_limit.async_remove() for template in list(self._time_listeners): self._time_listeners.pop(template)() @callback def 
async_refresh(self) -> None: """Force recalculation of the template.""" self._refresh(None) def _render_template_if_ready( self, track_template_: TrackTemplate, now: datetime, event: Event | None, ) -> bool | TrackTemplateResult: """Re-render the template if conditions match. Returns False if the template was not re-rendered. Returns True if the template re-rendered and did not change. Returns TrackTemplateResult if the template re-render generates a new result. """ template = track_template_.template if event: info = self._info[template] if not _event_triggers_rerender(event, info): return False had_timer = self._rate_limit.async_has_timer(template) if self._rate_limit.async_schedule_action( template, _rate_limit_for_event(event, info, track_template_), now, self._refresh, event, (track_template_,), True, ): return not had_timer _LOGGER.debug( "Template update %s triggered by event: %s", template.template, event, ) self._rate_limit.async_triggered(template, now) self._info[template] = info = template.async_render_to_info( track_template_.variables ) try: result: str | TemplateError = info.result() except TemplateError as ex: result = ex last_result = self._last_result.get(template) # Check to see if the result has changed if result == last_result: return True if isinstance(result, TemplateError) and isinstance(last_result, TemplateError): return True return TrackTemplateResult(template, last_result, result) @callback def _refresh( self, event: Event | None, track_templates: Iterable[TrackTemplate] | None = None, replayed: bool | None = False, ) -> None: """Refresh the template. The event is the state_changed event that caused the refresh to be considered. track_templates is an optional list of TrackTemplate objects to refresh. If not provided, all tracked templates will be considered. replayed is True if the event is being replayed because the rate limit was hit. """ updates = [] info_changed = False now = event.time_fired if not replayed and event else dt_util.utcnow() for track_template_ in track_templates or self._track_templates: update = self._render_template_if_ready(track_template_, now, event) if not update: continue template = track_template_.template self._setup_time_listener(template, self._info[template].has_time) info_changed = True if isinstance(update, TrackTemplateResult): updates.append(update) if info_changed: assert self._track_state_changes self._track_state_changes.async_update_listeners( _render_infos_to_track_states( [ _suppress_domain_all_in_render_info(self._info[template]) if self._rate_limit.async_has_timer(template) else self._info[template] for template in self._info ] ) ) _LOGGER.debug( "Template group %s listens for %s", self._track_templates, self.listeners, ) if not updates: return for track_result in updates: self._last_result[track_result.template] = track_result.result self.hass.async_run_hass_job(self._job, event, updates) TrackTemplateResultListener = Callable[ [ Event, List[TrackTemplateResult], ], None, ] """Type for the listener for template results. Action arguments ---------------- event Event that caused the template to change output. None if not triggered by an event. updates A list of TrackTemplateResult """ @callback @bind_hass def async_track_template_result( hass: HomeAssistant, track_templates: Iterable[TrackTemplate], action: TrackTemplateResultListener, raise_on_template_error: bool = False, ) -> _TrackTemplateResultInfo: """Add a listener that fires when the result of a template changes.
The action will fire with the initial result from the template, and then whenever the output from the template changes. The template will be reevaluated if any states referenced in the last run of the template change, or if manually triggered. If the result of the evaluation is different from the previous run, the listener is passed the result. If the template results in a TemplateError, this will be returned to the listener the first time this happens but not for subsequent errors. Once the template returns to a non-error condition the result is sent to the action as usual. Parameters ---------- hass Home assistant object. track_templates An iterable of TrackTemplate. action Callable to call with results. raise_on_template_error When set to True, if there is an exception processing the template during setup, the system will raise the exception instead of setting up tracking. Returns ------- Info object used to unregister the listener, and refresh the template. """ tracker = _TrackTemplateResultInfo(hass, track_templates, action) tracker.async_setup(raise_on_template_error) return tracker @callback @bind_hass def async_track_same_state( hass: HomeAssistant, period: timedelta, action: Callable[..., None], async_check_same_func: Callable[[str, State | None, State | None], bool], entity_ids: str | Iterable[str] = MATCH_ALL, ) -> CALLBACK_TYPE: """Track the state of entities for a period and run an action. The action runs once the period has elapsed, provided async_check_same_func accepted every state change seen in the meantime; a rejected change cancels the timer and the tracking. Without entity_ids we track all state changes. """ async_remove_state_for_cancel: CALLBACK_TYPE | None = None async_remove_state_for_listener: CALLBACK_TYPE | None = None job = HassJob(action) @callback def clear_listener() -> None: """Clear all unsub listeners.""" nonlocal async_remove_state_for_cancel, async_remove_state_for_listener if async_remove_state_for_listener is not None: async_remove_state_for_listener() async_remove_state_for_listener = None if async_remove_state_for_cancel is not None: async_remove_state_for_cancel() async_remove_state_for_cancel = None @callback def state_for_listener(now: Any) -> None: """Fire on state changes after a delay and call the action.""" nonlocal async_remove_state_for_listener async_remove_state_for_listener = None clear_listener() hass.async_run_hass_job(job) @callback def state_for_cancel_listener(event: Event) -> None: """Fire on changes and cancel the delay listener if the check fails.""" entity: str = event.data["entity_id"] from_state: State | None = event.data.get("old_state") to_state: State | None = event.data.get("new_state") if not async_check_same_func(entity, from_state, to_state): clear_listener() async_remove_state_for_listener = async_track_point_in_utc_time( hass, state_for_listener, dt_util.utcnow() + period ) if entity_ids == MATCH_ALL: async_remove_state_for_cancel = hass.bus.async_listen( EVENT_STATE_CHANGED, state_for_cancel_listener ) else: async_remove_state_for_cancel = async_track_state_change_event( hass, [entity_ids] if isinstance(entity_ids, str) else entity_ids, state_for_cancel_listener, ) return clear_listener track_same_state = threaded_listener_factory(async_track_same_state) @callback @bind_hass def async_track_point_in_time( hass: HomeAssistant, action: HassJob | Callable[..., None], point_in_time: datetime, ) -> CALLBACK_TYPE: """Add a listener that fires once after a specific point in time.""" job = action if isinstance(action, HassJob) else HassJob(action) @callback def utc_converter(utc_now: datetime) -> None: """Convert passed in UTC now to local now."""
hass.async_run_hass_job(job, dt_util.as_local(utc_now)) return async_track_point_in_utc_time(hass, utc_converter, point_in_time) track_point_in_time = threaded_listener_factory(async_track_point_in_time) @callback @bind_hass def async_track_point_in_utc_time( hass: HomeAssistant, action: HassJob | Callable[..., None], point_in_time: datetime, ) -> CALLBACK_TYPE: """Add a listener that fires once after a specific point in UTC time.""" # Ensure point_in_time is UTC utc_point_in_time = dt_util.as_utc(point_in_time) # Since this is called once, we accept a HassJob so we can avoid # having to figure out how to call the action every time it's called. job = action if isinstance(action, HassJob) else HassJob(action) cancel_callback: asyncio.TimerHandle | None = None @callback def run_action() -> None: """Call the action.""" nonlocal cancel_callback now = time_tracker_utcnow() # Depending on the available clock support (including timer hardware # and the OS kernel) it can happen that we fire a little bit too early # as measured by utcnow(). That is bad when callbacks have assumptions # about the current time. Thus, we rearm the timer for the remaining # time. delta = (utc_point_in_time - now).total_seconds() if delta > 0: _LOGGER.debug("Called %f seconds too early, rearming", delta) cancel_callback = hass.loop.call_later(delta, run_action) return hass.async_run_hass_job(job, utc_point_in_time) delta = utc_point_in_time.timestamp() - time.time() cancel_callback = hass.loop.call_later(delta, run_action) @callback def unsub_point_in_time_listener() -> None: """Cancel the call_later.""" assert cancel_callback is not None cancel_callback.cancel() return unsub_point_in_time_listener track_point_in_utc_time = threaded_listener_factory(async_track_point_in_utc_time) @callback @bind_hass def async_call_later( hass: HomeAssistant, delay: float, action: HassJob | Callable[..., None] ) -> CALLBACK_TYPE: """Add a listener that is called after <delay> seconds.""" return async_track_point_in_utc_time( hass, action, dt_util.utcnow() + timedelta(seconds=delay) ) call_later = threaded_listener_factory(async_call_later) @callback @bind_hass def async_track_time_interval( hass: HomeAssistant, action: Callable[..., None | Awaitable], interval: timedelta, ) -> CALLBACK_TYPE: """Add a listener that fires repetitively at every timedelta interval.""" remove = None interval_listener_job = None job = HassJob(action) def next_interval() -> datetime: """Return the next interval.""" return dt_util.utcnow() + interval @callback def interval_listener(now: datetime) -> None: """Handle elapsed intervals.""" nonlocal remove nonlocal interval_listener_job remove = async_track_point_in_utc_time( hass, interval_listener_job, next_interval() # type: ignore ) hass.async_run_hass_job(job, now) interval_listener_job = HassJob(interval_listener) remove = async_track_point_in_utc_time(hass, interval_listener_job, next_interval()) def remove_listener() -> None: """Remove interval listener.""" remove() # type: ignore return remove_listener track_time_interval = threaded_listener_factory(async_track_time_interval) @attr.s class SunListener: """Helper class to listen for sun events.""" hass: HomeAssistant = attr.ib() job: HassJob = attr.ib() event: str = attr.ib() offset: timedelta | None = attr.ib() _unsub_sun: CALLBACK_TYPE | None = attr.ib(default=None) _unsub_config: CALLBACK_TYPE | None = attr.ib(default=None) @callback def async_attach(self) -> None: """Attach a sun listener.""" assert self._unsub_config is None self._unsub_config =
self.hass.bus.async_listen( EVENT_CORE_CONFIG_UPDATE, self._handle_config_event ) self._listen_next_sun_event() @callback def async_detach(self) -> None: """Detach the sun listener.""" assert self._unsub_sun is not None assert self._unsub_config is not None self._unsub_sun() self._unsub_sun = None self._unsub_config() self._unsub_config = None @callback def _listen_next_sun_event(self) -> None: """Set up the sun event listener.""" assert self._unsub_sun is None self._unsub_sun = async_track_point_in_utc_time( self.hass, self._handle_sun_event, get_astral_event_next(self.hass, self.event, offset=self.offset), ) @callback def _handle_sun_event(self, _now: Any) -> None: """Handle solar event.""" self._unsub_sun = None self._listen_next_sun_event() self.hass.async_run_hass_job(self.job) @callback def _handle_config_event(self, _event: Any) -> None: """Handle core config update.""" assert self._unsub_sun is not None self._unsub_sun() self._unsub_sun = None self._listen_next_sun_event() @callback @bind_hass def async_track_sunrise( hass: HomeAssistant, action: Callable[..., None], offset: timedelta | None = None ) -> CALLBACK_TYPE: """Add a listener that will fire a specified offset from sunrise daily.""" listener = SunListener(hass, HassJob(action), SUN_EVENT_SUNRISE, offset) listener.async_attach() return listener.async_detach track_sunrise = threaded_listener_factory(async_track_sunrise) @callback @bind_hass def async_track_sunset( hass: HomeAssistant, action: Callable[..., None], offset: timedelta | None = None ) -> CALLBACK_TYPE: """Add a listener that will fire a specified offset from sunset daily.""" listener = SunListener(hass, HassJob(action), SUN_EVENT_SUNSET, offset) listener.async_attach() return listener.async_detach track_sunset = threaded_listener_factory(async_track_sunset) # For targeted patching in tests time_tracker_utcnow = dt_util.utcnow @callback @bind_hass def async_track_utc_time_change( hass: HomeAssistant, action: Callable[..., None], hour: Any | None = None, minute: Any | None = None, second: Any | None = None, local: bool = False, ) -> CALLBACK_TYPE: """Add a listener that will fire if time matches a pattern.""" job = HassJob(action) # We do not have to wrap the function with time pattern matching logic # if no pattern given if all(val is None for val in (hour, minute, second)): @callback def time_change_listener(event: Event) -> None: """Fire on every time changed event that comes in.""" hass.async_run_hass_job(job, event.data[ATTR_NOW]) return hass.bus.async_listen(EVENT_TIME_CHANGED, time_change_listener) matching_seconds = dt_util.parse_time_expression(second, 0, 59) matching_minutes = dt_util.parse_time_expression(minute, 0, 59) matching_hours = dt_util.parse_time_expression(hour, 0, 23) def calculate_next(now: datetime) -> datetime: """Calculate and set the next time the trigger should fire.""" localized_now = dt_util.as_local(now) if local else now return dt_util.find_next_time_expression_time( localized_now, matching_seconds, matching_minutes, matching_hours ) time_listener: CALLBACK_TYPE | None = None @callback def pattern_time_change_listener(_: datetime) -> None: """Listen for matching time_changed events.""" nonlocal time_listener now = time_tracker_utcnow() hass.async_run_hass_job(job, dt_util.as_local(now) if local else now) time_listener = async_track_point_in_utc_time( hass, pattern_time_change_listener, calculate_next(now + timedelta(seconds=1)), ) time_listener = async_track_point_in_utc_time( hass, pattern_time_change_listener, calculate_next(dt_util.utcnow())
) @callback def unsub_pattern_time_change_listener() -> None: """Cancel the time listener.""" assert time_listener is not None time_listener() return unsub_pattern_time_change_listener track_utc_time_change = threaded_listener_factory(async_track_utc_time_change) @callback @bind_hass def async_track_time_change( hass: HomeAssistant, action: Callable[..., None], hour: Any | None = None, minute: Any | None = None, second: Any | None = None, ) -> CALLBACK_TYPE: """Add a listener that will fire if local time matches a pattern.""" return async_track_utc_time_change(hass, action, hour, minute, second, local=True) track_time_change = threaded_listener_factory(async_track_time_change) def process_state_match(parameter: None | str | Iterable[str]) -> Callable[[str], bool]: """Convert parameter to function that matches input against parameter.""" if parameter is None or parameter == MATCH_ALL: return lambda _: True if isinstance(parameter, str) or not hasattr(parameter, "__iter__"): return lambda state: state == parameter parameter_set = set(parameter) return lambda state: state in parameter_set @callback def _entities_domains_from_render_infos( render_infos: Iterable[RenderInfo], ) -> tuple[set, set]: """Combine entities and domains from multiple RenderInfo objects.""" entities = set() domains = set() for render_info in render_infos: if render_info.entities: entities.update(render_info.entities) if render_info.domains: domains.update(render_info.domains) if render_info.domains_lifecycle: domains.update(render_info.domains_lifecycle) return entities, domains @callback def _render_infos_needs_all_listener(render_infos: Iterable[RenderInfo]) -> bool: """Determine if an all listener is needed from RenderInfo.""" for render_info in render_infos: # Tracking all states if render_info.all_states or render_info.all_states_lifecycle: return True # Previous call had an exception # so we do not know which states # to track if render_info.exception: return True return False @callback def _render_infos_to_track_states(render_infos: Iterable[RenderInfo]) -> TrackStates: """Create a TrackStates dataclass from the latest RenderInfo.""" if _render_infos_needs_all_listener(render_infos): return TrackStates(True, set(), set()) return TrackStates(False, *_entities_domains_from_render_infos(render_infos)) @callback def _event_triggers_rerender(event: Event, info: RenderInfo) -> bool: """Determine if a template should be re-rendered from an event.""" entity_id = event.data.get(ATTR_ENTITY_ID) if info.filter(entity_id): return True if ( event.data.get("new_state") is not None and event.data.get("old_state") is not None ): return False return bool(info.filter_lifecycle(entity_id)) @callback def _rate_limit_for_event( event: Event, info: RenderInfo, track_template_: TrackTemplate ) -> timedelta | None: """Determine the rate limit for an event.""" entity_id = event.data.get(ATTR_ENTITY_ID) # Specifically referenced entities are excluded # from the rate limit if entity_id in info.entities: return None if track_template_.rate_limit is not None: return track_template_.rate_limit rate_limit: timedelta | None = info.rate_limit return rate_limit def _suppress_domain_all_in_render_info(render_info: RenderInfo) -> RenderInfo: """Remove the domains and all_states from render info during a rate limit.""" rate_limited_render_info = copy.copy(render_info) rate_limited_render_info.all_states = False rate_limited_render_info.all_states_lifecycle = False rate_limited_render_info.domains = set() rate_limited_render_info.domains_lifecycle = set() return
rate_limited_render_info
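# --- Usage sketch (editor's addition, not part of the module above) ---------
# A minimal, hypothetical example of the tracking helpers defined above,
# assuming a running Home Assistant instance supplies `hass` (e.g. inside a
# component's async_setup). The entity id and the 30-second delay are
# invented for illustration.
from homeassistant.core import Event, callback
from homeassistant.helpers.event import (
    async_call_later,
    async_track_state_change_event,
)


@callback
def _kitchen_light_changed(event: Event) -> None:
    """Print the old and new state of the tracked entity."""
    old_state = event.data.get("old_state")
    new_state = event.data.get("new_state")
    print(f"{event.data['entity_id']}: {old_state} -> {new_state}")


# Fires _kitchen_light_changed whenever light.kitchen changes state.
unsub = async_track_state_change_event(
    hass, ["light.kitchen"], _kitchen_light_changed
)

# Fires once, roughly 30 seconds from now.
cancel = async_call_later(hass, 30, lambda now: print("30 seconds elapsed"))

# Both helpers return a callable that removes the listener again.
unsub()
cancel()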
"""The tests for Lock device conditions.""" import pytest import homeassistant.components.automation as automation from homeassistant.components.lock import DOMAIN from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.helpers import device_registry from homeassistant.setup import async_setup_component from tests.common import ( MockConfigEntry, assert_lists_same, async_get_device_automations, async_mock_service, mock_device_registry, mock_registry, ) from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401 @pytest.fixture def device_reg(hass): """Return an empty, loaded, registry.""" return mock_device_registry(hass) @pytest.fixture def entity_reg(hass): """Return an empty, loaded, registry.""" return mock_registry(hass) @pytest.fixture def calls(hass): """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation") async def test_get_conditions(hass, device_reg, entity_reg): """Test we get the expected conditions from a lock.""" config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) device_entry = device_reg.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id) expected_conditions = [ { "condition": "device", "domain": DOMAIN, "type": "is_locked", "device_id": device_entry.id, "entity_id": f"{DOMAIN}.test_5678", }, { "condition": "device", "domain": DOMAIN, "type": "is_unlocked", "device_id": device_entry.id, "entity_id": f"{DOMAIN}.test_5678", }, ] conditions = await async_get_device_automations(hass, "condition", device_entry.id) assert_lists_same(conditions, expected_conditions) async def test_if_state(hass, calls): """Test for turn_on and turn_off conditions.""" hass.states.async_set("lock.entity", STATE_LOCKED) assert await async_setup_component( hass, automation.DOMAIN, { automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event1"}, "condition": [ { "condition": "device", "domain": DOMAIN, "device_id": "", "entity_id": "lock.entity", "type": "is_locked", } ], "action": { "service": "test.automation", "data_template": { "some": "is_locked - {{ trigger.platform }} - {{ trigger.event.event_type }}" }, }, }, { "trigger": {"platform": "event", "event_type": "test_event2"}, "condition": [ { "condition": "device", "domain": DOMAIN, "device_id": "", "entity_id": "lock.entity", "type": "is_unlocked", } ], "action": { "service": "test.automation", "data_template": { "some": "is_unlocked - {{ trigger.platform }} - {{ trigger.event.event_type }}" }, }, }, ] }, ) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data["some"] == "is_locked - event - test_event1" hass.states.async_set("lock.entity", STATE_UNLOCKED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(calls) == 2 assert calls[1].data["some"] == "is_unlocked - event - test_event2"
adrienbrault/home-assistant
tests/components/lock/test_device_condition.py
homeassistant/helpers/event.py
""" python generate_sparsetools.py Generate manual wrappers for C++ sparsetools code. Type codes used: 'i': integer scalar 'I': integer array 'T': data array 'B': boolean array 'V': std::vector<integer>* 'W': std::vector<data>* '*': indicates that the next argument is an output argument 'v': void 'l': 64-bit integer scalar See sparsetools.cxx for more details. """ import optparse import os from distutils.dep_util import newer # # List of all routines and their argument types. # # The first code indicates the return value, the rest the arguments. # # bsr.h BSR_ROUTINES = """ bsr_diagonal v iiiiiIIT*T bsr_tocsr v iiiiIIT*I*I*T bsr_scale_rows v iiiiII*TT bsr_scale_columns v iiiiII*TT bsr_sort_indices v iiii*I*I*T bsr_transpose v iiiiIIT*I*I*T bsr_matmat_pass2 v iiiiiIITIIT*I*I*T bsr_matvec v iiiiIITT*T bsr_matvecs v iiiiiIITT*T bsr_elmul_bsr v iiiiIITIIT*I*I*T bsr_eldiv_bsr v iiiiIITIIT*I*I*T bsr_plus_bsr v iiiiIITIIT*I*I*T bsr_minus_bsr v iiiiIITIIT*I*I*T bsr_maximum_bsr v iiiiIITIIT*I*I*T bsr_minimum_bsr v iiiiIITIIT*I*I*T bsr_ne_bsr v iiiiIITIIT*I*I*B bsr_lt_bsr v iiiiIITIIT*I*I*B bsr_gt_bsr v iiiiIITIIT*I*I*B bsr_le_bsr v iiiiIITIIT*I*I*B bsr_ge_bsr v iiiiIITIIT*I*I*B """ # csc.h CSC_ROUTINES = """ csc_diagonal v iiiIIT*T csc_tocsr v iiIIT*I*I*T csc_matmat_pass1 v iiIIII*I csc_matmat_pass2 v iiIITIIT*I*I*T csc_matvec v iiIITT*T csc_matvecs v iiiIITT*T csc_elmul_csc v iiIITIIT*I*I*T csc_eldiv_csc v iiIITIIT*I*I*T csc_plus_csc v iiIITIIT*I*I*T csc_minus_csc v iiIITIIT*I*I*T csc_maximum_csc v iiIITIIT*I*I*T csc_minimum_csc v iiIITIIT*I*I*T csc_ne_csc v iiIITIIT*I*I*B csc_lt_csc v iiIITIIT*I*I*B csc_gt_csc v iiIITIIT*I*I*B csc_le_csc v iiIITIIT*I*I*B csc_ge_csc v iiIITIIT*I*I*B """ # csr.h CSR_ROUTINES = """ csr_matmat_pass1 v iiIIII*I csr_matmat_pass2 v iiIITIIT*I*I*T csr_diagonal v iiiIIT*T csr_tocsc v iiIIT*I*I*T csr_tobsr v iiiiIIT*I*I*T csr_todense v iiIIT*T csr_matvec v iiIITT*T csr_matvecs v iiiIITT*T csr_elmul_csr v iiIITIIT*I*I*T csr_eldiv_csr v iiIITIIT*I*I*T csr_plus_csr v iiIITIIT*I*I*T csr_minus_csr v iiIITIIT*I*I*T csr_maximum_csr v iiIITIIT*I*I*T csr_minimum_csr v iiIITIIT*I*I*T csr_ne_csr v iiIITIIT*I*I*B csr_lt_csr v iiIITIIT*I*I*B csr_gt_csr v iiIITIIT*I*I*B csr_le_csr v iiIITIIT*I*I*B csr_ge_csr v iiIITIIT*I*I*B csr_scale_rows v iiII*TT csr_scale_columns v iiII*TT csr_sort_indices v iI*I*T csr_eliminate_zeros v ii*I*I*T csr_sum_duplicates v ii*I*I*T get_csr_submatrix v iiIITiiii*V*V*W csr_row_index v iIIIT*I*T csr_row_slice v iiiIIT*I*T csr_column_index1 v iIiiII*I*I csr_column_index2 v IIiIT*I*T csr_sample_values v iiIITiII*T csr_count_blocks i iiiiII csr_sample_offsets i iiIIiII*I expandptr v iI*I test_throw_error i csr_has_sorted_indices i iII csr_has_canonical_format i iII """ # coo.h, dia.h, csgraph.h OTHER_ROUTINES = """ coo_tocsr v iiiIIT*I*I*T coo_todense v iilIIT*Ti coo_matvec v lIITT*T dia_matvec v iiiiITT*T cs_graph_components i iII*I """ # List of compilation units COMPILATION_UNITS = [ ('bsr', BSR_ROUTINES), ('csr', CSR_ROUTINES), ('csc', CSC_ROUTINES), ('other', OTHER_ROUTINES), ] # # List of the supported index typenums and the corresponding C++ types # I_TYPES = [ ('NPY_INT32', 'npy_int32'), ('NPY_INT64', 'npy_int64'), ] # # List of the supported data typenums and the corresponding C++ types # T_TYPES = [ ('NPY_BOOL', 'npy_bool_wrapper'), ('NPY_BYTE', 'npy_byte'), ('NPY_UBYTE', 'npy_ubyte'), ('NPY_SHORT', 'npy_short'), ('NPY_USHORT', 'npy_ushort'), ('NPY_INT', 'npy_int'), ('NPY_UINT', 'npy_uint'), ('NPY_LONG', 'npy_long'), ('NPY_ULONG', 'npy_ulong'), 
('NPY_LONGLONG', 'npy_longlong'), ('NPY_ULONGLONG', 'npy_ulonglong'), ('NPY_FLOAT', 'npy_float'), ('NPY_DOUBLE', 'npy_double'), ('NPY_LONGDOUBLE', 'npy_longdouble'), ('NPY_CFLOAT', 'npy_cfloat_wrapper'), ('NPY_CDOUBLE', 'npy_cdouble_wrapper'), ('NPY_CLONGDOUBLE', 'npy_clongdouble_wrapper'), ] # # Code templates # THUNK_TEMPLATE = """ static PY_LONG_LONG %(name)s_thunk(int I_typenum, int T_typenum, void **a) { %(thunk_content)s } """ METHOD_TEMPLATE = """ NPY_VISIBILITY_HIDDEN PyObject * %(name)s_method(PyObject *self, PyObject *args) { return call_thunk('%(ret_spec)s', "%(arg_spec)s", %(name)s_thunk, args); } """ GET_THUNK_CASE_TEMPLATE = """ static int get_thunk_case(int I_typenum, int T_typenum) { %(content)s; return -1; } """ # # Code generation # def get_thunk_type_set(): """ Get lists containing the cartesian product of index and data types, plus a getter routine. Returns ------- i_types : list [(j, I_typenum, None, I_type, None), ...] Pairing of index type numbers and the corresponding C++ types, and a unique index `j`. This is for routines that are parameterized only by I but not by T. it_types : list [(j, I_typenum, T_typenum, I_type, T_type), ...] Same as `i_types`, but for routines parameterized both by T and I. getter_code : str C++ code for a function that takes I_typenum, T_typenum and returns the unique index corresponding to the lists, or -1 if no match was found. """ it_types = [] i_types = [] j = 0 getter_code = " if (0) {}" for I_typenum, I_type in I_TYPES: piece = """ else if (I_typenum == %(I_typenum)s) { if (T_typenum == -1) { return %(j)s; }""" getter_code += piece % dict(I_typenum=I_typenum, j=j) i_types.append((j, I_typenum, None, I_type, None)) j += 1 for T_typenum, T_type in T_TYPES: piece = """ else if (T_typenum == %(T_typenum)s) { return %(j)s; }""" getter_code += piece % dict(T_typenum=T_typenum, j=j) it_types.append((j, I_typenum, T_typenum, I_type, T_type)) j += 1 getter_code += """ }""" return i_types, it_types, GET_THUNK_CASE_TEMPLATE % dict(content=getter_code) def parse_routine(name, args, types): """ Generate thunk and method code for a given routine. Parameters ---------- name : str Name of the C++ routine args : str Argument list specification (in format explained above) types : list List of types to instantiate, as returned by `get_thunk_type_set` """ ret_spec = args[0] arg_spec = args[1:] def get_arglist(I_type, T_type): """ Generate argument list for calling the C++ function """ args = [] next_is_writeable = False j = 0 for t in arg_spec: const = '' if next_is_writeable else 'const ' next_is_writeable = False if t == '*': next_is_writeable = True continue elif t == 'i': args.append("*(%s*)a[%d]" % (const + I_type, j)) elif t == 'I': args.append("(%s*)a[%d]" % (const + I_type, j)) elif t == 'T': args.append("(%s*)a[%d]" % (const + T_type, j)) elif t == 'B': args.append("(npy_bool_wrapper*)a[%d]" % (j,)) elif t == 'V': if const: raise ValueError("'V' argument must be an output arg") args.append("(std::vector<%s>*)a[%d]" % (I_type, j,)) elif t == 'W': if const: raise ValueError("'W' argument must be an output arg") args.append("(std::vector<%s>*)a[%d]" % (T_type, j,)) elif t == 'l': args.append("*(%snpy_int64*)a[%d]" % (const, j)) else: raise ValueError("Invalid spec character %r" % (t,)) j += 1 return ", ".join(args) # Generate thunk code: a giant switch statement with different # type combinations inside.
thunk_content = """int j = get_thunk_case(I_typenum, T_typenum); switch (j) {""" for j, I_typenum, T_typenum, I_type, T_type in types: arglist = get_arglist(I_type, T_type) if T_type is None: dispatch = "%s" % (I_type,) else: dispatch = "%s,%s" % (I_type, T_type) if 'B' in arg_spec: dispatch += ",npy_bool_wrapper" piece = """ case %(j)s:""" if ret_spec == 'v': piece += """ (void)%(name)s<%(dispatch)s>(%(arglist)s); return 0;""" else: piece += """ return %(name)s<%(dispatch)s>(%(arglist)s);""" thunk_content += piece % dict(j=j, I_type=I_type, T_type=T_type, I_typenum=I_typenum, T_typenum=T_typenum, arglist=arglist, name=name, dispatch=dispatch) thunk_content += """ default: throw std::runtime_error("internal error: invalid argument typenums"); }""" thunk_code = THUNK_TEMPLATE % dict(name=name, thunk_content=thunk_content) # Generate method code method_code = METHOD_TEMPLATE % dict(name=name, ret_spec=ret_spec, arg_spec=arg_spec) return thunk_code, method_code def main(): p = optparse.OptionParser(usage=(__doc__ or '').strip()) p.add_option("--no-force", action="store_false", dest="force", default=True) options, args = p.parse_args() names = [] i_types, it_types, getter_code = get_thunk_type_set() # Generate *_impl.h for each compilation unit for unit_name, routines in COMPILATION_UNITS: thunks = [] methods = [] # Generate thunks and methods for all routines for line in routines.splitlines(): line = line.strip() if not line or line.startswith('#'): continue try: name, args = line.split(None, 1) except ValueError: raise ValueError("Malformed line: %r" % (line,)) args = "".join(args.split()) if 't' in args or 'T' in args: thunk, method = parse_routine(name, args, it_types) else: thunk, method = parse_routine(name, args, i_types) if name in names: raise ValueError("Duplicate routine %r" % (name,)) names.append(name) thunks.append(thunk) methods.append(method) # Produce output dst = os.path.join(os.path.dirname(__file__), 'sparsetools', unit_name + '_impl.h') if newer(__file__, dst) or options.force: print("[generate_sparsetools] generating %r" % (dst,)) with open(dst, 'w') as f: write_autogen_blurb(f) f.write(getter_code) for thunk in thunks: f.write(thunk) for method in methods: f.write(method) else: print("[generate_sparsetools] %r already up-to-date" % (dst,)) # Generate code for method struct method_defs = "" for name in names: method_defs += "NPY_VISIBILITY_HIDDEN PyObject *%s_method(PyObject *, PyObject *);\n" % (name,) method_struct = """\nstatic struct PyMethodDef sparsetools_methods[] = {""" for name in names: method_struct += """ {"%(name)s", (PyCFunction)%(name)s_method, METH_VARARGS, NULL},""" % dict(name=name) method_struct += """ {NULL, NULL, 0, NULL} };""" # Produce sparsetools_impl.h dst = os.path.join(os.path.dirname(__file__), 'sparsetools', 'sparsetools_impl.h') if newer(__file__, dst) or options.force: print("[generate_sparsetools] generating %r" % (dst,)) with open(dst, 'w') as f: write_autogen_blurb(f) f.write(method_defs) f.write(method_struct) else: print("[generate_sparsetools] %r already up-to-date" % (dst,)) def write_autogen_blurb(stream): stream.write("""\ /* This file is autogenerated by generate_sparsetools.py * Do not edit manually or check into VCS. */ """) if __name__ == "__main__": main()
import numpy as np from numpy.testing import assert_allclose import pytest from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn import scipy.fft as fft from scipy import fftpack # scipy.fft wraps the fftpack versions but with normalized inverse transforms. # So, the forward transforms and definitions are already thoroughly tested in # fftpack/test_real_transforms.py @pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16]) @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("norm", [None, 'ortho']) def test_identity_1d(forward, backward, type, n, axis, norm): # Test the identity f^-1(f(x)) == x x = np.random.rand(n, n) y = forward(x, type, axis=axis, norm=norm) z = backward(y, type, axis=axis, norm=norm) assert_allclose(z, x) pad = [(0, 0)] * 2 pad[axis] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, n, axis, norm) assert_allclose(z2, x) @pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)]) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("shape, axes", [ ((4, 4), 0), ((4, 4), 1), ((4, 4), None), ((4, 4), (0, 1)), ((10, 12), None), ((10, 12), (0, 1)), ((4, 5, 6), None), ((4, 5, 6), 1), ((4, 5, 6), (0, 2)), ]) @pytest.mark.parametrize("norm", [None, 'ortho']) def test_identity_nd(forward, backward, type, shape, axes, norm): # Test the identity f^-1(f(x)) == x x = np.random.random(shape) if axes is not None: shape = np.take(shape, axes) y = forward(x, type, axes=axes, norm=norm) z = backward(y, type, axes=axes, norm=norm) assert_allclose(z, x) if axes is None: pad = [(0, 4)] * x.ndim elif isinstance(axes, int): pad = [(0, 0)] * x.ndim pad[axes] = (0, 4) else: pad = [(0, 0)] * x.ndim for a in axes: pad[a] = (0, 4) y2 = np.pad(y, pad, mode='edge') z2 = backward(y2, type, shape, axes, norm) assert_allclose(z2, x) @pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn']) @pytest.mark.parametrize("type", [1, 2, 3, 4]) @pytest.mark.parametrize("norm", [None, 'ortho']) def test_fftpack_equivalence(func, type, norm): x = np.random.rand(8, 16) fft_res = getattr(fft, func)(x, type, norm=norm) fftpack_res = getattr(fftpack, func)(x, type, norm=norm) assert_allclose(fft_res, fftpack_res)
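# --- Quick standalone demonstration (editor's addition, assumed setup) ------
# The identity the tests above rely on: because scipy.fft normalizes the
# inverse transforms, a dct/idct round trip recovers the input with the
# default norm, for every transform type. Guarded so pytest does not run it.
if __name__ == "__main__":
    sample = np.random.rand(16)
    for type_ in (1, 2, 3, 4):
        assert_allclose(idct(dct(sample, type=type_), type=type_), sample)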
lhilt/scipy
scipy/fft/tests/test_real_transforms.py
scipy/sparse/generate_sparsetools.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Here are all the test parameters and values for each `~astropy.modeling.FittableModel` defined. There is a dictionary for 1D and a dictionary for 2D models. Explanation of keywords of the dictionaries: "parameters" : list or dict Model parameters the model is tested with. Make sure you keep the right order. For polynomials you can also use a dict to specify the coefficients. See examples below. "x_values" : list x values where the model is evaluated. "y_values" : list Reference y values at the positions given in x_values. "z_values" : list Reference z values at the positions given in x_values and y_values. (2D model option) "x_lim" : list x test range for the model fitter. Depending on the model this can differ, e.g. the PowerLaw model should be tested over a few magnitudes. "y_lim" : list y test range for the model fitter. Depending on the model this can differ, e.g. the PowerLaw model should be tested over a few magnitudes. (2D model option) "log_fit" : bool PowerLaw models should be tested over a few magnitudes, so log_fit should be true. "requires_scipy" : bool If a model requires scipy (Bessel functions etc.) set this flag. "integral" : float Approximate value of the integral in the range x_lim (and y_lim). "deriv_parameters" : list If given, the test of the derivative will use these parameters to create a model (optional) "deriv_initial" : list If given, the test of the derivative will use these parameters as initial values for the fit (optional) """ from ..functional_models import ( Gaussian1D, Sine1D, Box1D, Linear1D, Lorentz1D, MexicanHat1D, Trapezoid1D, Const1D, Moffat1D, Gaussian2D, Const2D, Box2D, MexicanHat2D, TrapezoidDisk2D, AiryDisk2D, Moffat2D, Disk2D, Ring2D, Sersic1D, Sersic2D, Voigt1D, Planar2D) from ..polynomial import Polynomial1D, Polynomial2D from ..powerlaws import ( PowerLaw1D, BrokenPowerLaw1D, SmoothlyBrokenPowerLaw1D, ExponentialCutoffPowerLaw1D, LogParabola1D) import numpy as np # 1D Models models_1D = { Gaussian1D: { 'parameters': [1, 0, 1], 'x_values': [0, np.sqrt(2), -np.sqrt(2)], 'y_values': [1.0, 0.367879, 0.367879], 'x_lim': [-10, 10], 'integral': np.sqrt(2 * np.pi) }, Sine1D: { 'parameters': [1, 0.1, 0], 'x_values': [0, 2.5], 'y_values': [0, 1], 'x_lim': [-10, 10], 'integral': 0 }, Box1D: { 'parameters': [1, 0, 10], 'x_values': [-5, 5, 0, -10, 10], 'y_values': [1, 1, 1, 0, 0], 'x_lim': [-10, 10], 'integral': 10 }, Linear1D: { 'parameters': [1, 0], 'x_values': [0, np.pi, 42, -1], 'y_values': [0, np.pi, 42, -1], 'x_lim': [-10, 10], 'integral': 0 }, Lorentz1D: { 'parameters': [1, 0, 1], 'x_values': [0, -1, 1, 0.5, -0.5], 'y_values': [1., 0.2, 0.2, 0.5, 0.5], 'x_lim': [-10, 10], 'integral': 1 }, MexicanHat1D: { 'parameters': [1, 0, 1], 'x_values': [0, 1, -1, 3, -3], 'y_values': [1.0, 0.0, 0.0, -0.088872, -0.088872], 'x_lim': [-20, 20], 'integral': 0 }, Trapezoid1D: { 'parameters': [1, 0, 2, 1], 'x_values': [0, 1, -1, 1.5, -1.5, 2, 2], 'y_values': [1, 1, 1, 0.5, 0.5, 0, 0], 'x_lim': [-10, 10], 'integral': 3 }, Const1D: { 'parameters': [1], 'x_values': [-1, 1, np.pi, -42., 0], 'y_values': [1, 1, 1, 1, 1], 'x_lim': [-10, 10], 'integral': 20 }, Moffat1D: { 'parameters': [1, 0, 1, 2], 'x_values': [0, 1, -1, 3, -3], 'y_values': [1.0, 0.25, 0.25, 0.01, 0.01], 'x_lim': [-10, 10], 'integral': 1, 'deriv_parameters': [23.4, 1.2, 2.1, 2.3], 'deriv_initial': [10, 1, 1, 1] }, PowerLaw1D: { 'parameters': [1, 1, 2], 'constraints': {'fixed': {'x_0': True}}, 'x_values': [1, 10, 100], 'y_values': [1.0,
0.01, 0.0001], 'x_lim': [1, 10], 'log_fit': True, 'integral': 0.99 }, BrokenPowerLaw1D: { 'parameters': [1, 1, 2, 3], 'constraints': {'fixed': {'x_break': True}}, 'x_values': [0.1, 1, 10, 100], 'y_values': [1e2, 1.0, 1e-3, 1e-6], 'x_lim': [0.1, 100], 'log_fit': True }, SmoothlyBrokenPowerLaw1D: { 'parameters': [1, 1, -2, 2, 0.5], 'constraints': {'fixed': {'x_break': True, 'delta': True}}, 'x_values': [0.01, 1, 100], 'y_values': [3.99920012e-04, 1.0, 3.99920012e-04], 'x_lim': [0.01, 100], 'log_fit': True }, ExponentialCutoffPowerLaw1D: { 'parameters': [1, 1, 2, 3], 'constraints': {'fixed': {'x_0': True}}, 'x_values': [0.1, 1, 10, 100], 'y_values': [9.67216100e+01, 7.16531311e-01, 3.56739933e-04, 3.33823780e-19], 'x_lim': [0.01, 100], 'log_fit': True }, LogParabola1D: { 'parameters': [1, 2, 3, 0.1], 'constraints': {'fixed': {'x_0': True}}, 'x_values': [0.1, 1, 10, 100], 'y_values': [3.26089063e+03, 7.62472488e+00, 6.17440488e-03, 1.73160572e-06], 'x_lim': [0.1, 100], 'log_fit': True }, Polynomial1D: { 'parameters': {'degree': 2, 'c0': 1., 'c1': 1., 'c2': 1.}, 'x_values': [1, 10, 100], 'y_values': [3, 111, 10101], 'x_lim': [-3, 3] }, Sersic1D: { 'parameters': [1, 20, 4], 'x_values': [0.1, 1, 10, 100], 'y_values': [2.78629391e+02, 5.69791430e+01, 3.38788244e+00, 2.23941982e-02], 'requires_scipy': True, 'x_lim': [0, 10], 'log_fit': True }, Voigt1D: { 'parameters': [0, 1, 0.5, 0.9], 'x_values': [0, 2, 4, 8, 10], 'y_values': [0.520935, 0.017205, 0.003998, 0.000983, 0.000628], 'x_lim': [-3, 3] } } # 2D Models models_2D = { Gaussian2D: { 'parameters': [1, 0, 0, 1, 1], 'constraints': {'fixed': {'theta': True}}, 'x_values': [0, np.sqrt(2), -np.sqrt(2)], 'y_values': [0, np.sqrt(2), -np.sqrt(2)], 'z_values': [1, 1. / np.exp(1) ** 2, 1. / np.exp(1) ** 2], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 2 * np.pi, 'deriv_parameters': [137., 5.1, 5.4, 1.5, 2., np.pi/4], 'deriv_initial': [10, 5, 5, 4, 4, .5] }, Const2D: { 'parameters': [1], 'x_values': [-1, 1, np.pi, -42., 0], 'y_values': [0, 1, 42, np.pi, -1], 'z_values': [1, 1, 1, 1, 1], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 400 }, Box2D: { 'parameters': [1, 0, 0, 10, 10], 'x_values': [-5, 5, -5, 5, 0, -10, 10], 'y_values': [-5, 5, 0, 0, 0, -10, 10], 'z_values': [1, 1, 1, 1, 1, 0, 0], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 100 }, MexicanHat2D: { 'parameters': [1, 0, 0, 1], 'x_values': [0, 0, 0, 0, 0, 1, -1, 3, -3], 'y_values': [0, 1, -1, 3, -3, 0, 0, 0, 0], 'z_values': [1.0, 0.303265, 0.303265, -0.038881, -0.038881, 0.303265, 0.303265, -0.038881, -0.038881], 'x_lim': [-10, 11], 'y_lim': [-10, 11], 'integral': 0 }, TrapezoidDisk2D: { 'parameters': [1, 0, 0, 1, 1], 'x_values': [0, 0.5, 0, 1.5], 'y_values': [0, 0.5, 1.5, 0], 'z_values': [1, 1, 0.5, 0.5], 'x_lim': [-3, 3], 'y_lim': [-3, 3] }, AiryDisk2D: { 'parameters': [7, 0, 0, 10], 'x_values': [0, 1, -1, -0.5, -0.5], 'y_values': [0, -1, 0.5, 0.5, -0.5], 'z_values': [7., 6.50158267, 6.68490643, 6.87251093, 6.87251093], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'requires_scipy': True }, Moffat2D: { 'parameters': [1, 0, 0, 1, 2], 'x_values': [0, 1, -1, 3, -3], 'y_values': [0, -1, 3, 1, -3], 'z_values': [1.0, 0.111111, 0.008264, 0.008264, 0.00277], 'x_lim': [-3, 3], 'y_lim': [-3, 3] }, Polynomial2D: { 'parameters': {'degree': 1, 'c0_0': 1., 'c1_0': 1., 'c0_1': 1.}, 'x_values': [1, 2, 3], 'y_values': [1, 3, 2], 'z_values': [3, 6, 6], 'x_lim': [1, 100], 'y_lim': [1, 100] }, Disk2D: { 'parameters': [1, 0, 0, 5], 'x_values': [-5, 5, -5, 5, 0, -10, 10], 'y_values': [-5, 5, 0, 0, 0, 
-10, 10], 'z_values': [0, 0, 1, 1, 1, 0, 0], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': np.pi * 5 ** 2 }, Ring2D: { 'parameters': [1, 0, 0, 5, 5], 'x_values': [-5, 5, -5, 5, 0, -10, 10], 'y_values': [-5, 5, 0, 0, 0, -10, 10], 'z_values': [1, 1, 1, 1, 0, 0, 0], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': np.pi * (10 ** 2 - 5 ** 2) }, Sersic2D: { 'parameters': [1, 25, 4, 50, 50, 0.5, -1], 'x_values': [0.0, 1, 10, 100], 'y_values': [1, 100, 0.0, 10], 'z_values': [1.686398e-02, 9.095221e-02, 2.341879e-02, 9.419231e-02], 'requires_scipy': True, 'x_lim': [1, 1e10], 'y_lim': [1, 1e10] }, Planar2D: { 'parameters': [1, 1, 0], 'x_values': [0, np.pi, 42, -1], 'y_values': [np.pi, 0, -1, 42], 'z_values': [np.pi, np.pi, 41, 41], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 0 } }
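# --- Illustrative sketch (editor's addition, assumed usage) ------------------
# How a test runner might consume the dictionaries above: instantiate each 1D
# model from its "parameters" entry and check that it reproduces "y_values"
# at "x_values". The helper name and tolerance are invented for illustration.
def _check_model_1d(model_cls, spec, rtol=1e-5):
    params = spec['parameters']
    if isinstance(params, dict):
        model = model_cls(**params)   # e.g. Polynomial1D(degree=2, c0=1., ...)
    else:
        model = model_cls(*params)
    np.testing.assert_allclose(model(np.array(spec['x_values'], dtype=float)),
                               spec['y_values'], rtol=rtol)


_check_model_1d(Gaussian1D, models_1D[Gaussian1D])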
# Licensed under a 3-clause BSD style license - see LICENSE.rst import pytest import numpy as np from ....tests.helper import catch_warnings from ....table import Table, QTable, NdarrayMixin, Column from .... import units as u from ....coordinates import SkyCoord, Latitude, Longitude, Angle, EarthLocation from ....time import Time, TimeDelta from ....units.quantity import QuantityInfo try: import h5py except ImportError: HAS_H5PY = False else: HAS_H5PY = True try: import yaml except ImportError: HAS_YAML = False else: HAS_YAML = True ALL_DTYPES = [np.uint8, np.uint16, np.uint32, np.uint64, np.int8, np.int16, np.int32, np.int64, np.float32, np.float64, np.bool_, '|S3'] def _default_values(dtype): if dtype == np.bool_: return [0, 1, 1] elif dtype == '|S3': return [b'abc', b'def', b'ghi'] else: return [1, 2, 3] @pytest.mark.skipif('not HAS_H5PY') def test_write_nopath(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) with pytest.raises(ValueError) as exc: t1.write(test_file) assert exc.value.args[0] == "table path should be set via the path= argument" @pytest.mark.skipif('not HAS_H5PY') def test_read_notable_nopath(tmpdir): test_file = str(tmpdir.join('test.hdf5')) h5py.File(test_file, 'w').close() # create empty file with pytest.raises(ValueError) as exc: t1 = Table.read(test_file, path='/', format='hdf5') assert exc.value.args[0] == 'no table found in HDF5 group /' @pytest.mark.skipif('not HAS_H5PY') def test_read_nopath(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='the_table') t2 = Table.read(test_file) assert np.all(t1['a'] == t2['a']) @pytest.mark.skipif('not HAS_H5PY') def test_write_invalid_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) with pytest.raises(ValueError) as exc: t1.write(test_file, path='test/') assert exc.value.args[0] == "table path should end with table name, not /" @pytest.mark.skipif('not HAS_H5PY') def test_read_invalid_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='the_table') with pytest.raises(OSError) as exc: Table.read(test_file, path='test/') assert exc.value.args[0] == "Path test/ does not exist" @pytest.mark.skipif('not HAS_H5PY') def test_read_missing_group(tmpdir): test_file = str(tmpdir.join('test.hdf5')) h5py.File(test_file, 'w').close() # create empty file with pytest.raises(OSError) as exc: Table.read(test_file, path='test/path/table') assert exc.value.args[0] == "Path test/path/table does not exist" @pytest.mark.skipif('not HAS_H5PY') def test_read_missing_table(tmpdir): test_file = str(tmpdir.join('test.hdf5')) with h5py.File(test_file, 'w') as f: f.create_group('test').create_group('path') with pytest.raises(OSError) as exc: Table.read(test_file, path='test/path/table') assert exc.value.args[0] == "Path test/path/table does not exist" @pytest.mark.skipif('not HAS_H5PY') def test_read_missing_group_fileobj(tmpdir): test_file = str(tmpdir.join('test.hdf5')) with h5py.File(test_file, 'w') as f: with pytest.raises(OSError) as exc: Table.read(f, path='test/path/table') assert exc.value.args[0] == "Path test/path/table does not exist" @pytest.mark.skipif('not HAS_H5PY') def test_read_write_simple(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, 
path='the_table') t2 = Table.read(test_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_write_existing_table(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='the_table') with pytest.raises(OSError) as exc: t1.write(test_file, path='the_table', append=True) assert exc.value.args[0] == "Table the_table already exists" @pytest.mark.skipif('not HAS_H5PY') def test_read_write_memory(tmpdir): with h5py.File('test', 'w', driver='core', backing_store=False) as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='the_table') t2 = Table.read(output_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_write_existing(tmpdir): test_file = str(tmpdir.join('test.hdf5')) h5py.File(test_file, 'w').close() # create empty file t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) with pytest.raises(OSError) as exc: t1.write(test_file, path='the_table') assert exc.value.args[0].startswith("File exists:") @pytest.mark.skipif('not HAS_H5PY') def test_read_write_existing_overwrite(tmpdir): test_file = str(tmpdir.join('test.hdf5')) h5py.File(test_file, 'w').close() # create empty file t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='the_table', overwrite=True) t2 = Table.read(test_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_write_existing_append(tmpdir): test_file = str(tmpdir.join('test.hdf5')) h5py.File(test_file, 'w').close() # create empty file t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='the_table_1', append=True) t1.write(test_file, path='the_table_2', append=True) t2 = Table.read(test_file, path='the_table_1') assert np.all(t2['a'] == [1, 2, 3]) t3 = Table.read(test_file, path='the_table_2') assert np.all(t3['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_write_existing_append_groups(tmpdir): test_file = str(tmpdir.join('test.hdf5')) with h5py.File(test_file, 'w') as f: f.create_group('test_1') t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='test_1/the_table_1', append=True) t1.write(test_file, path='test_2/the_table_2', append=True) t2 = Table.read(test_file, path='test_1/the_table_1') assert np.all(t2['a'] == [1, 2, 3]) t3 = Table.read(test_file, path='test_2/the_table_2') assert np.all(t3['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_write_existing_append_overwrite(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='table1') t1.write(test_file, path='table2', append=True) t1v2 = Table() t1v2.add_column(Column(name='a', data=[4, 5, 6])) with pytest.raises(OSError) as exc: t1v2.write(test_file, path='table1', append=True) assert exc.value.args[0] == 'Table table1 already exists' t1v2.write(test_file, path='table1', append=True, overwrite=True) t2 = Table.read(test_file, path='table1') assert np.all(t2['a'] == [4, 5, 6]) t3 = Table.read(test_file, path='table2') assert np.all(t3['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_fileobj(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='the_table') import h5py with h5py.File(test_file, 
'r') as input_file: t2 = Table.read(input_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_fileobj_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='path/to/data/the_table') import h5py with h5py.File(test_file, 'r') as input_file: t2 = Table.read(input_file, path='path/to/data/the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_fileobj_group_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='path/to/data/the_table') import h5py with h5py.File(test_file, 'r') as input_file: t2 = Table.read(input_file['path/to'], path='data/the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_wrong_fileobj(): class FakeFile: def read(self): pass f = FakeFile() with pytest.raises(TypeError) as exc: t1 = Table.read(f, format='hdf5') assert exc.value.args[0] == 'h5py can only open regular files' @pytest.mark.skipif('not HAS_H5PY') def test_write_fileobj(tmpdir): test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='the_table') t2 = Table.read(test_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_write_fileobj_group(tmpdir): test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='path/to/data/the_table') t2 = Table.read(test_file, path='path/to/data/the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_write_wrong_type(): t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) with pytest.raises(TypeError) as exc: t1.write(1212, path='path/to/data/the_table', format='hdf5') assert exc.value.args[0] == ('output should be a string ' 'or an h5py File or Group object') @pytest.mark.skipif('not HAS_H5PY') @pytest.mark.parametrize(('dtype'), ALL_DTYPES) def test_preserve_single_dtypes(tmpdir, dtype): test_file = str(tmpdir.join('test.hdf5')) values = _default_values(dtype) t1 = Table() t1.add_column(Column(name='a', data=np.array(values, dtype=dtype))) t1.write(test_file, path='the_table') t2 = Table.read(test_file, path='the_table') assert np.all(t2['a'] == values) assert t2['a'].dtype == dtype @pytest.mark.skipif('not HAS_H5PY') def test_preserve_all_dtypes(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() for dtype in ALL_DTYPES: values = _default_values(dtype) t1.add_column(Column(name=str(dtype), data=np.array(values, dtype=dtype))) t1.write(test_file, path='the_table') t2 = Table.read(test_file, path='the_table') for dtype in ALL_DTYPES: values = _default_values(dtype) assert np.all(t2[str(dtype)] == values) assert t2[str(dtype)].dtype == dtype @pytest.mark.skipif('not HAS_H5PY') def test_preserve_meta(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['a'] = 1 t1.meta['b'] = 'hello' t1.meta['c'] = 3.14159 t1.meta['d'] = True t1.meta['e'] = np.array([1, 2, 3]) t1.write(test_file, path='the_table') t2 = Table.read(test_file, path='the_table') for key in t1.meta: assert np.all(t1.meta[key] == t2.meta[key]) @pytest.mark.skipif('not HAS_H5PY
or not HAS_YAML') def test_preserve_serialized(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True) t2 = Table.read(test_file, path='the_table') assert t1['a'].unit == t2['a'].unit assert t1['a'].format == t2['a'].format assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_preserve_serialized_compatibility_mode(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} with catch_warnings() as w: t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True, compatibility_mode=True) assert str(w[0].message).startswith( "compatibility mode for writing is deprecated") t2 = Table.read(test_file, path='the_table') assert t1['a'].unit == t2['a'].unit assert t1['a'].format == t2['a'].format assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_preserve_serialized_in_complicated_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} t1.write(test_file, path='the_table/complicated/path', serialize_meta=True, overwrite=True) t2 = Table.read(test_file, path='the_table/complicated/path') assert t1['a'].format == t2['a'].format assert t1['a'].unit == t2['a'].unit assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_metadata_very_large(tmpdir): """Test that very large datasets work, now!""" test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} t1.meta["meta_big"] = "0" * (2 ** 16 + 1) t1.meta["meta_biggerstill"] = "0" * (2 ** 18) t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True) t2 = Table.read(test_file, path='the_table') assert t1['a'].unit == t2['a'].unit assert t1['a'].format == t2['a'].format assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_metadata_very_large_fails_compatibility_mode(tmpdir): """Test that very large metadata do not work in compatibility mode.""" test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3]) t1.meta["meta"] = "0" * (2 ** 16 + 1) with catch_warnings() as w: t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True, compatibility_mode=True) assert len(w) == 2 # Error message slightly changed in h5py 2.7.1, thus the 2part assert assert str(w[1].message).startswith( "Attributes could not be 
written to the output HDF5 " "file: Unable to create attribute ") assert "bject header message is too large" in str(w[1].message) @pytest.mark.skipif('not HAS_H5PY') def test_skip_meta(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['a'] = 1 t1.meta['b'] = 'hello' t1.meta['c'] = 3.14159 t1.meta['d'] = True t1.meta['e'] = np.array([1, 2, 3]) t1.meta['f'] = str with catch_warnings() as w: t1.write(test_file, path='the_table') assert len(w) == 1 assert str(w[0].message).startswith( "Attribute `f` of type {0} cannot be written to HDF5 files - skipping".format(type(t1.meta['f']))) @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_fail_meta_serialize(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['f'] = str with pytest.raises(Exception) as err: t1.write(test_file, path='the_table', serialize_meta=True) assert "cannot represent an object: <class 'str'>" in str(err) @pytest.mark.skipif('not HAS_H5PY') def test_read_h5py_objects(tmpdir): # Regression test - ensure that Datasets are recognized automatically test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='the_table') f = h5py.File(test_file) t2 = Table.read(f, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) t3 = Table.read(f['/'], path='the_table') assert np.all(t3['a'] == [1, 2, 3]) t4 = Table.read(f['the_table']) assert np.all(t4['a'] == [1, 2, 3]) f.close() # don't raise an error in 'test --open-files' def assert_objects_equal(obj1, obj2, attrs, compare_class=True): if compare_class: assert obj1.__class__ is obj2.__class__ info_attrs = ['info.name', 'info.format', 'info.unit', 'info.description', 'info.meta'] for attr in attrs + info_attrs: a1 = obj1 a2 = obj2 for subattr in attr.split('.'): try: a1 = getattr(a1, subattr) a2 = getattr(a2, subattr) except AttributeError: a1 = a1[subattr] a2 = a2[subattr] # Mixin info.meta can None instead of empty OrderedDict(), #6720 would # fix this. if attr == 'info.meta': if a1 is None: a1 = {} if a2 is None: a2 = {} assert np.all(a1 == a2) # Testing HDF5 table read/write with mixins. This is mostly # copied from FITS mixin testing. el = EarthLocation(x=1 * u.km, y=3 * u.km, z=5 * u.km) el2 = EarthLocation(x=[1, 2] * u.km, y=[3, 4] * u.km, z=[5, 6] * u.km) sc = SkyCoord([1, 2], [3, 4], unit='deg,deg', frame='fk4', obstime='J1990.5') scc = sc.copy() scc.representation = 'cartesian' tm = Time([2450814.5, 2450815.5], format='jd', scale='tai', location=el) mixin_cols = { 'tm': tm, 'dt': TimeDelta([1, 2] * u.day), 'sc': sc, 'scc': scc, 'scd': SkyCoord([1, 2], [3, 4], [5, 6], unit='deg,deg,m', frame='fk4', obstime=['J1990.5', 'J1991.5']), 'q': [1, 2] * u.m, 'lat': Latitude([1, 2] * u.deg), 'lon': Longitude([1, 2] * u.deg, wrap_angle=180. 
* u.deg), 'ang': Angle([1, 2] * u.deg), 'el2': el2, } time_attrs = ['value', 'shape', 'format', 'scale', 'location'] compare_attrs = { 'c1': ['data'], 'c2': ['data'], 'tm': time_attrs, 'dt': ['shape', 'value', 'format', 'scale'], 'sc': ['ra', 'dec', 'representation', 'frame.name'], 'scc': ['x', 'y', 'z', 'representation', 'frame.name'], 'scd': ['ra', 'dec', 'distance', 'representation', 'frame.name'], 'q': ['value', 'unit'], 'lon': ['value', 'unit', 'wrap_angle'], 'lat': ['value', 'unit'], 'ang': ['value', 'unit'], 'el2': ['x', 'y', 'z', 'ellipsoid'], 'nd': ['x', 'y', 'z'], } @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_hdf5_mixins_qtable_to_table(tmpdir): """Test writing as QTable and reading as Table. Ensure correct classes come out. """ filename = str(tmpdir.join('test_simple.hdf5')) names = sorted(mixin_cols) t = QTable([mixin_cols[name] for name in names], names=names) t.write(filename, format='hdf5', path='root', serialize_meta=True) t2 = Table.read(filename, format='hdf5', path='root') assert t.colnames == t2.colnames for name, col in t.columns.items(): col2 = t2[name] # Special-case Time, which does not yet support round-tripping # the format. if isinstance(col2, Time): col2.format = col.format attrs = compare_attrs[name] compare_class = True if isinstance(col.info, QuantityInfo): # Downgrade Quantity to Column + unit assert type(col2) is Column # Class-specific attributes like `value` or `wrap_angle` are lost. attrs = ['unit'] compare_class = False # Compare data values here (assert_objects_equal doesn't know how in this case) assert np.all(col.value == col2) assert_objects_equal(col, col2, attrs, compare_class) @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') @pytest.mark.parametrize('table_cls', (Table, QTable)) def test_hdf5_mixins_as_one(table_cls, tmpdir): """Test write/read all cols at once and validate intermediate column names""" filename = str(tmpdir.join('test_simple.hdf5')) names = sorted(mixin_cols) serialized_names = ['ang', 'dt.jd1', 'dt.jd2', 'el2.x', 'el2.y', 'el2.z', 'lat', 'lon', 'q', 'sc.ra', 'sc.dec', 'scc.x', 'scc.y', 'scc.z', 'scd.ra', 'scd.dec', 'scd.distance', 'scd.obstime.jd1', 'scd.obstime.jd2', 'tm.jd1', 'tm.jd2', ] t = table_cls([mixin_cols[name] for name in names], names=names) t.meta['C'] = 'spam' t.meta['comments'] = ['this', 'is', 'a', 'comment'] t.meta['history'] = ['first', 'second', 'third'] t.write(filename, format="hdf5", path='root', serialize_meta=True) t2 = table_cls.read(filename, format='hdf5', path='root') assert t2.meta['C'] == 'spam' assert t2.meta['comments'] == ['this', 'is', 'a', 'comment'] assert t2.meta['history'] == ['first', 'second', 'third'] assert t.colnames == t2.colnames # Read directly via hdf5 and confirm column names h5 = h5py.File(filename, 'r') assert list(h5['root'].dtype.names) == serialized_names h5.close() @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') @pytest.mark.parametrize('name_col', list(mixin_cols.items())) @pytest.mark.parametrize('table_cls', (Table, QTable)) def test_hdf5_mixins_per_column(table_cls, name_col, tmpdir): """Test write/read one col at a time and do detailed validation""" filename = str(tmpdir.join('test_simple.hdf5')) name, col = name_col c = [1.0, 2.0] t = table_cls([c, col, c], names=['c1', name, 'c2']) t[name].info.description = 'my description' t[name].info.meta = {'list': list(range(50)), 'dict': {'a': 'b' * 200}} if not t.has_mixin_columns: pytest.skip('column is not a mixin (e.g. 
Quantity subclass in Table)') if isinstance(t[name], NdarrayMixin): pytest.xfail('NdarrayMixin not supported') t.write(filename, format="hdf5", path='root', serialize_meta=True) t2 = table_cls.read(filename, format='hdf5', path='root') assert t.colnames == t2.colnames for colname in t.colnames: assert_objects_equal(t[colname], t2[colname], compare_attrs[colname]) # Special case to make sure Column type doesn't leak into Time class data if name.startswith('tm'): assert t2[name]._time.jd1.__class__ is np.ndarray assert t2[name]._time.jd2.__class__ is np.ndarray @pytest.mark.skipif('HAS_YAML or not HAS_H5PY') def test_warn_for_dropped_info_attributes(tmpdir): filename = str(tmpdir.join('test.hdf5')) t = Table([[1, 2]]) t['col0'].info.description = 'hello' with catch_warnings() as warns: t.write(filename, path='root', serialize_meta=False) assert len(warns) == 1 assert str(warns[0].message).startswith( "table contains column(s) with defined 'unit'") @pytest.mark.skipif('HAS_YAML or not HAS_H5PY') def test_error_for_mixins_but_no_yaml(tmpdir): filename = str(tmpdir.join('test.hdf5')) t = Table([mixin_cols['sc']]) with pytest.raises(TypeError) as err: t.write(filename, path='root', serialize_meta=True) assert "cannot write type SkyCoord column 'col0' to HDF5 without PyYAML" in str(err)
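The suite above exercises one core pattern: write a Table to an HDF5 path, read it back, and check that data and, with serialize_meta=True, column metadata survive. A minimal sketch of that round trip; the file name 'demo.hdf5' and path 'data/the_table' are illustrative only, and h5py plus PyYAML are assumed to be installed:

import numpy as np
from astropy.table import Table, Column

t1 = Table()
t1.add_column(Column(name='a', data=[1, 2, 3], unit='s'))

# serialize_meta=True stores unit/format/description alongside the dataset
t1.write('demo.hdf5', path='data/the_table', serialize_meta=True,
         overwrite=True)

t2 = Table.read('demo.hdf5', path='data/the_table')
assert np.all(t2['a'] == [1, 2, 3])
assert t2['a'].unit == t1['a'].unit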
# Source repo: funbaker/astropy
# Test file: astropy/io/misc/tests/test_hdf5.py
# Code file: astropy/modeling/tests/example_models.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import numpy as np
from numpy.testing import assert_array_equal

from asdf import yamlutil

from astropy import modeling
from .basic import TransformType

__all__ = ['ShiftType', 'ScaleType', 'PolynomialType']


class ShiftType(TransformType):
    name = "transform/shift"
    types = ['astropy.modeling.models.Shift']

    @classmethod
    def from_tree_transform(cls, node, ctx):
        offset = node['offset']
        if not np.isscalar(offset):
            raise NotImplementedError(
                "Asdf currently only supports scalar inputs to Shift transform.")

        return modeling.models.Shift(offset)

    @classmethod
    def to_tree_transform(cls, model, ctx):
        return {'offset': model.offset.value}
        # return yamlutil.custom_tree_to_tagged_tree(node, ctx)

    @classmethod
    def assert_equal(cls, a, b):
        # TODO: If models become comparable themselves, remove this.
        TransformType.assert_equal(a, b)
        assert (isinstance(a, modeling.models.Shift) and
                isinstance(b, modeling.models.Shift))
        assert_array_equal(a.offset.value, b.offset.value)


class ScaleType(TransformType):
    name = "transform/scale"
    types = ['astropy.modeling.models.Scale']

    @classmethod
    def from_tree_transform(cls, node, ctx):
        factor = node['factor']
        if not np.isscalar(factor):
            raise NotImplementedError(
                "Asdf currently only supports scalar inputs to Scale transform.")

        return modeling.models.Scale(factor)

    @classmethod
    def to_tree_transform(cls, model, ctx):
        node = {'factor': model.factor.value}
        return yamlutil.custom_tree_to_tagged_tree(node, ctx)

    @classmethod
    def assert_equal(cls, a, b):
        # TODO: If models become comparable themselves, remove this.
        TransformType.assert_equal(a, b)
        assert (isinstance(a, modeling.models.Scale) and
                isinstance(b, modeling.models.Scale))
        assert_array_equal(a.factor, b.factor)


class PolynomialType(TransformType):
    name = "transform/polynomial"
    types = ['astropy.modeling.models.Polynomial1D',
             'astropy.modeling.models.Polynomial2D']

    @classmethod
    def from_tree_transform(cls, node, ctx):
        coefficients = np.asarray(node['coefficients'])
        n_dim = coefficients.ndim

        if n_dim == 1:
            model = modeling.models.Polynomial1D(coefficients.size - 1)
            model.parameters = coefficients
        elif n_dim == 2:
            shape = coefficients.shape
            degree = shape[0] - 1
            if shape[0] != shape[1]:
                raise TypeError("Coefficients must be an (n+1, n+1) matrix")

            coeffs = {}
            for i in range(shape[0]):
                for j in range(shape[0]):
                    if i + j < degree + 1:
                        name = 'c' + str(i) + '_' + str(j)
                        coeffs[name] = coefficients[i, j]
            model = modeling.models.Polynomial2D(degree, **coeffs)
        else:
            raise NotImplementedError(
                "Asdf currently only supports 1D or 2D polynomial transform.")

        return model

    @classmethod
    def to_tree_transform(cls, model, ctx):
        if isinstance(model, modeling.models.Polynomial1D):
            coefficients = np.array(model.parameters)
        elif isinstance(model, modeling.models.Polynomial2D):
            degree = model.degree
            coefficients = np.zeros((degree + 1, degree + 1))
            for i in range(degree + 1):
                for j in range(degree + 1):
                    if i + j < degree + 1:
                        name = 'c' + str(i) + '_' + str(j)
                        coefficients[i, j] = getattr(model, name).value
        node = {'coefficients': coefficients}
        return yamlutil.custom_tree_to_tagged_tree(node, ctx)

    @classmethod
    def assert_equal(cls, a, b):
        # TODO: If models become comparable themselves, remove this.
        TransformType.assert_equal(a, b)
        assert (isinstance(a, (modeling.models.Polynomial1D,
                               modeling.models.Polynomial2D)) and
                isinstance(b, (modeling.models.Polynomial1D,
                               modeling.models.Polynomial2D)))
        assert_array_equal(a.parameters, b.parameters)
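For reference, the 2D branch of PolynomialType stores coefficients as an (n+1, n+1) matrix in which only entries with i + j <= degree are meaningful. A small sketch of that packing, assuming only astropy.modeling; the degree and coefficient values here are arbitrary examples:

import numpy as np
from astropy.modeling import models

model = models.Polynomial2D(2, c0_0=1.0, c1_0=2.0, c0_1=3.0)
degree = model.degree
coefficients = np.zeros((degree + 1, degree + 1))
for i in range(degree + 1):
    for j in range(degree + 1):
        if i + j < degree + 1:
            # mirrors to_tree_transform: c<i>_<j> goes to coefficients[i, j]
            coefficients[i, j] = getattr(model, 'c{0}_{1}'.format(i, j)).value
# Entries with i + j > degree stay zero and are ignored on read.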
# Source repo: funbaker/astropy
# Test file: astropy/io/misc/tests/test_hdf5.py
# Code file: astropy/io/misc/asdf/tags/transform/polynomial.py
""" Utilities for computing periodogram statistics. This is an internal module; users should access this functionality via the ``false_alarm_probability`` and ``false_alarm_level`` methods of the ``astropy.stats.LombScargle`` API. """ from functools import wraps import numpy as np def _weighted_sum(val, dy): if dy is not None: return (val / dy ** 2).sum() else: return val.sum() def _weighted_mean(val, dy): if dy is None: return val.mean() else: return _weighted_sum(val, dy) / _weighted_sum(np.ones_like(val), dy) def _weighted_var(val, dy): return _weighted_mean(val ** 2, dy) - _weighted_mean(val, dy) ** 2 def _gamma(N): from scipy.special import gammaln # Note: this is closely approximated by (1 - 0.75 / N) for large N return np.sqrt(2 / N) * np.exp(gammaln(N / 2) - gammaln((N - 1) / 2)) def _log_gamma(N): from scipy.special import gammaln return 0.5 * np.log(2 / N) + gammaln(N / 2) - gammaln((N - 1) / 2) def vectorize_first_argument(func): @wraps(func) def new_func(x, *args, **kwargs): x = np.asarray(x) return np.array([func(xi, *args, **kwargs) for xi in x.flat]).reshape(x.shape) return new_func def pdf_single(z, N, normalization, dH=1, dK=3): """Probability density function for Lomb-Scargle periodogram Compute the expected probability density function of the periodogram for the null hypothesis - i.e. data consisting of Gaussian noise. Parameters ---------- z : array-like The periodogram value. N : int The number of data points from which the periodogram was computed. normalization : {'standard', 'model', 'log', 'psd'} The periodogram normalization. dH, dK : integers, optional The number of parameters in the null hypothesis and the model. Returns ------- pdf : np.ndarray The expected probability density function. Notes ----- For normalization='psd', the distribution can only be computed for periodograms constructed with errors specified. All expressions used here are adapted from Table 1 of Baluev 2008 [1]_. References ---------- .. [1] Baluev, R.V. MNRAS 385, 1279 (2008) """ z = np.asarray(z) if dK - dH != 2: raise NotImplementedError("Degrees of freedom != 2") Nk = N - dK if normalization == 'psd': return np.exp(-z) elif normalization == 'standard': return 0.5 * Nk * (1 - z) ** (0.5 * Nk - 1) elif normalization == 'model': return 0.5 * Nk * (1 + z) ** (-0.5 * Nk - 1) elif normalization == 'log': return 0.5 * Nk * np.exp(-0.5 * Nk * z) else: raise ValueError("normalization='{0}' is not recognized" "".format(normalization)) def fap_single(z, N, normalization, dH=1, dK=3): """Single-frequency false alarm probability for the Lomb-Scargle periodogram This is equal to 1 - cdf, where cdf is the cumulative distribution. The single-frequency false alarm probability should not be confused with the false alarm probability for the largest peak. Parameters ---------- z : array-like The periodogram value. N : int The number of data points from which the periodogram was computed. normalization : {'standard', 'model', 'log', 'psd'} The periodogram normalization. dH, dK : integers, optional The number of parameters in the null hypothesis and the model. Returns ------- false_alarm_probability : np.ndarray The single-frequency false alarm probability. Notes ----- For normalization='psd', the distribution can only be computed for periodograms constructed with errors specified. All expressions used here are adapted from Table 1 of Baluev 2008 [1]_. References ---------- .. [1] Baluev, R.V. 
MNRAS 385, 1279 (2008) """ z = np.asarray(z) if dK - dH != 2: raise NotImplementedError("Degrees of freedom != 2") Nk = N - dK if normalization == 'psd': return np.exp(-z) elif normalization == 'standard': return (1 - z) ** (0.5 * Nk) elif normalization == 'model': return (1 + z) ** (-0.5 * Nk) elif normalization == 'log': return np.exp(-0.5 * Nk * z) else: raise ValueError("normalization='{0}' is not recognized" "".format(normalization)) def inv_fap_single(fap, N, normalization, dH=1, dK=3): """Single-frequency inverse false alarm probability This function computes the periodogram value associated with the specified single-frequency false alarm probability. This should not be confused with the false alarm level of the largest peak. Parameters ---------- fap : array-like The false alarm probability. N : int The number of data points from which the periodogram was computed. normalization : {'standard', 'model', 'log', 'psd'} The periodogram normalization. dH, dK : integers, optional The number of parameters in the null hypothesis and the model. Returns ------- z : np.ndarray The periodogram power corresponding to the single-peak false alarm probability. Notes ----- For normalization='psd', the distribution can only be computed for periodograms constructed with errors specified. All expressions used here are adapted from Table 1 of Baluev 2008 [1]_. References ---------- .. [1] Baluev, R.V. MNRAS 385, 1279 (2008) """ fap = np.asarray(fap) if dK - dH != 2: raise NotImplementedError("Degrees of freedom != 2") Nk = N - dK if normalization == 'psd': return -np.log(fap) elif normalization == 'standard': return 1 - fap ** (2 / Nk) elif normalization == 'model': return -1 + fap ** (-2 / Nk) elif normalization == 'log': return -2 / Nk * np.log(fap) else: raise ValueError("normalization='{0}' is not recognized" "".format(normalization)) def cdf_single(z, N, normalization, dH=1, dK=3): """Cumulative distribution for the Lomb-Scargle periodogram Compute the expected cumulative distribution of the periodogram for the null hypothesis - i.e. data consisting of Gaussian noise. Parameters ---------- z : array-like The periodogram value. N : int The number of data points from which the periodogram was computed. normalization : {'standard', 'model', 'log', 'psd'} The periodogram normalization. dH, dK : integers, optional The number of parameters in the null hypothesis and the model. Returns ------- cdf : np.ndarray The expected cumulative distribution function. Notes ----- For normalization='psd', the distribution can only be computed for periodograms constructed with errors specified. All expressions used here are adapted from Table 1 of Baluev 2008 [1]_. References ---------- .. [1] Baluev, R.V. 
MNRAS 385, 1279 (2008) """ return 1 - fap_single(z, N, normalization=normalization, dH=dH, dK=dK) def tau_davies(Z, fmax, t, y, dy, normalization='standard', dH=1, dK=3): """tau factor for estimating Davies bound (Baluev 2008, Table 1)""" N = len(t) NH = N - dH # DOF for null hypothesis NK = N - dK # DOF for periodic hypothesis Dt = _weighted_var(t, dy) Teff = np.sqrt(4 * np.pi * Dt) # Effective baseline W = fmax * Teff Z = np.asarray(Z) if normalization == 'psd': # 'psd' normalization is same as Baluev's z return W * np.exp(-Z) * np.sqrt(Z) elif normalization == 'standard': # 'standard' normalization is Z = 2/NH * z_1 return (_gamma(NH) * W * (1 - Z) ** (0.5 * (NK - 1)) * np.sqrt(0.5 * NH * Z)) elif normalization == 'model': # 'model' normalization is Z = 2/NK * z_2 return (_gamma(NK) * W * (1 + Z) ** (-0.5 * NK) * np.sqrt(0.5 * NK * Z)) elif normalization == 'log': # 'log' normalization is Z = 2/NK * z_3 return (_gamma(NK) * W * np.exp(-0.5 * Z * (NK - 0.5)) * np.sqrt(NK * np.sinh(0.5 * Z))) else: raise NotImplementedError("normalization={0}".format(normalization)) def fap_naive(Z, fmax, t, y, dy, normalization='standard'): """False Alarm Probability based on estimated number of indep frequencies""" N = len(t) T = max(t) - min(t) N_eff = fmax * T fap_s = fap_single(Z, N, normalization=normalization) # result is 1 - (1 - fap_s) ** N_eff # this is much more precise for small Z / large N return -np.expm1(N_eff * np.log1p(-fap_s)) def inv_fap_naive(fap, fmax, t, y, dy, normalization='standard'): """Inverse FAP based on estimated number of indep frequencies""" fap = np.asarray(fap) N = len(t) T = max(t) - min(t) N_eff = fmax * T #fap_s = 1 - (1 - fap) ** (1 / N_eff) fap_s = -np.expm1(np.log(1 - fap) / N_eff) return inv_fap_single(fap_s, N, normalization) def fap_davies(Z, fmax, t, y, dy, normalization='standard'): """Davies upper-bound to the false alarm probability (Eqn 5 of Baluev 2008) """ N = len(t) fap_s = fap_single(Z, N, normalization=normalization) tau = tau_davies(Z, fmax, t, y, dy, normalization=normalization) return fap_s + tau @vectorize_first_argument def inv_fap_davies(p, fmax, t, y, dy, normalization='standard'): """Inverse of the davies upper-bound""" from scipy import optimize args = (fmax, t, y, dy, normalization) z0 = inv_fap_naive(p, *args) func = lambda z, *args: fap_davies(z, *args) - p res = optimize.root(func, z0, args=args, method='lm') if not res.success: raise ValueError('inv_fap_baluev did not converge for p={0}'.format(p)) return res.x def fap_baluev(Z, fmax, t, y, dy, normalization='standard'): """Alias-free approximation to false alarm probability (Eqn 6 of Baluev 2008) """ fap_s = fap_single(Z, len(t), normalization) tau = tau_davies(Z, fmax, t, y, dy, normalization=normalization) # result is 1 - (1 - fap_s) * np.exp(-tau) # this is much more precise for small numbers return -np.expm1(-tau) + fap_s * np.exp(-tau) @vectorize_first_argument def inv_fap_baluev(p, fmax, t, y, dy, normalization='standard'): """Inverse of the Baluev alias-free approximation""" from scipy import optimize args = (fmax, t, y, dy, normalization) z0 = inv_fap_naive(p, *args) func = lambda z, *args: fap_baluev(z, *args) - p res = optimize.root(func, z0, args=args, method='lm') if not res.success: raise ValueError('inv_fap_baluev did not converge for p={0}'.format(p)) return res.x def _bootstrap_max(t, y, dy, fmax, normalization, random_seed): """Generate a sequence of bootstrap estimates of the max""" from .core import LombScargle rng = np.random.RandomState(random_seed) while True: s = 
rng.randint(0, len(y), len(y)) # sample with replacement ls_boot = LombScargle(t, y[s], dy if dy is None else dy[s], normalization=normalization) freq, power = ls_boot.autopower(maximum_frequency=fmax) yield power.max() def fap_bootstrap(Z, fmax, t, y, dy, normalization='standard', n_bootstraps=1000, random_seed=None): """Bootstrap estimate of the false alarm probability""" pmax = np.fromiter(_bootstrap_max(t, y, dy, fmax, normalization, random_seed), float, n_bootstraps) pmax.sort() return 1 - np.searchsorted(pmax, Z) / len(pmax) def inv_fap_bootstrap(fap, fmax, t, y, dy, normalization='standard', n_bootstraps=1000, random_seed=None): """Bootstrap estimate of the inverse false alarm probability""" fap = np.asarray(fap) pmax = np.fromiter(_bootstrap_max(t, y, dy, fmax, normalization, random_seed), float, n_bootstraps) pmax.sort() return pmax[np.clip(np.floor((1 - fap) * len(pmax)).astype(int), 0, len(pmax) - 1)] METHODS = {'single': fap_single, 'naive': fap_naive, 'davies': fap_davies, 'baluev': fap_baluev, 'bootstrap': fap_bootstrap} def false_alarm_probability(Z, fmax, t, y, dy, normalization='standard', method='baluev', method_kwds=None): """Compute the approximate false alarm probability for periodogram peaks Z This gives an estimate of the false alarm probability for the largest value in a periodogram, based on the null hypothesis of non-varying data with Gaussian noise. The true probability cannot be computed analytically, so each method available here is an approximation to the true value. Parameters ---------- Z : array-like The periodogram value. fmax : float The maximum frequency of the periodogram. t, y, dy : array-like The data times, values, and errors. normalization : {'standard', 'model', 'log', 'psd'}, optional The periodogram normalization. method : {'baluev', 'davies', 'naive', 'bootstrap'}, optional The approximation method to use. method_kwds : dict, optional Additional method-specific keywords. Returns ------- false_alarm_probability : np.ndarray The false alarm probability. Notes ----- For normalization='psd', the distribution can only be computed for periodograms constructed with errors specified. See Also -------- false_alarm_level : compute the periodogram level for a particular fap References ---------- .. [1] Baluev, R.V. MNRAS 385, 1279 (2008) """ if method == 'single': return fap_single(Z, len(t), normalization) elif method not in METHODS: raise ValueError("Unrecognized method: {0}".format(method)) method = METHODS[method] method_kwds = method_kwds or {} return method(Z, fmax, t, y, dy, normalization, **method_kwds) INV_METHODS = {'single': inv_fap_single, 'naive': inv_fap_naive, 'davies': inv_fap_davies, 'baluev': inv_fap_baluev, 'bootstrap': inv_fap_bootstrap} def false_alarm_level(p, fmax, t, y, dy, normalization, method='baluev', method_kwds=None): """Compute the approximate periodogram level given a false alarm probability This gives an estimate of the periodogram level corresponding to a specified false alarm probability for the largest peak, assuming a null hypothesis of non-varying data with Gaussian noise. The true level cannot be computed analytically, so each method available here is an approximation to the true value. Parameters ---------- p : array-like The false alarm probability (0 < p < 1). fmax : float The maximum frequency of the periodogram. t, y, dy : arrays The data times, values, and errors. normalization : {'standard', 'model', 'log', 'psd'}, optional The periodogram normalization. 
method : {'baluev', 'davies', 'naive', 'bootstrap'}, optional The approximation method to use. method_kwds : dict, optional Additional method-specific keywords. Returns ------- z : np.ndarray The periodogram level. Notes ----- For normalization='psd', the distribution can only be computed for periodograms constructed with errors specified. See Also -------- false_alarm_probability : compute the fap for a given periodogram level References ---------- .. [1] Baluev, R.V. MNRAS 385, 1279 (2008) """ if method == 'single': return inv_fap_single(p, len(t), normalization) elif method not in INV_METHODS: raise ValueError("Unrecognized method: {0}".format(method)) method = INV_METHODS[method] method_kwds = method_kwds or {} return method(p, fmax, t, y, dy, normalization, **method_kwds)
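In practice these functions are reached through the LombScargle methods named in the module docstring. A short usage sketch with synthetic data; the signal frequency, noise level, and seed are arbitrary choices for illustration:

import numpy as np
from astropy.stats import LombScargle

rng = np.random.RandomState(0)
t = 100 * rng.rand(200)
y = np.sin(2 * np.pi * 0.3 * t) + 0.5 * rng.randn(200)

ls = LombScargle(t, y)
frequency, power = ls.autopower()

# FAP of the highest peak (Baluev approximation, Eqn 6 of Baluev 2008)
fap = ls.false_alarm_probability(power.max(), method='baluev')

# Power level corresponding to a 1% false alarm probability
level = ls.false_alarm_level(0.01)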
written to the output HDF5 " "file: Unable to create attribute ") assert "bject header message is too large" in str(w[1].message) @pytest.mark.skipif('not HAS_H5PY') def test_skip_meta(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['a'] = 1 t1.meta['b'] = 'hello' t1.meta['c'] = 3.14159 t1.meta['d'] = True t1.meta['e'] = np.array([1, 2, 3]) t1.meta['f'] = str with catch_warnings() as w: t1.write(test_file, path='the_table') assert len(w) == 1 assert str(w[0].message).startswith( "Attribute `f` of type {0} cannot be written to HDF5 files - skipping".format(type(t1.meta['f']))) @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_fail_meta_serialize(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['f'] = str with pytest.raises(Exception) as err: t1.write(test_file, path='the_table', serialize_meta=True) assert "cannot represent an object: <class 'str'>" in str(err) @pytest.mark.skipif('not HAS_H5PY') def test_read_h5py_objects(tmpdir): # Regression test - ensure that Datasets are recognized automatically test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='the_table') f = h5py.File(test_file) t2 = Table.read(f, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) t3 = Table.read(f['/'], path='the_table') assert np.all(t3['a'] == [1, 2, 3]) t4 = Table.read(f['the_table']) assert np.all(t4['a'] == [1, 2, 3]) f.close() # don't raise an error in 'test --open-files' def assert_objects_equal(obj1, obj2, attrs, compare_class=True): if compare_class: assert obj1.__class__ is obj2.__class__ info_attrs = ['info.name', 'info.format', 'info.unit', 'info.description', 'info.meta'] for attr in attrs + info_attrs: a1 = obj1 a2 = obj2 for subattr in attr.split('.'): try: a1 = getattr(a1, subattr) a2 = getattr(a2, subattr) except AttributeError: a1 = a1[subattr] a2 = a2[subattr] # Mixin info.meta can None instead of empty OrderedDict(), #6720 would # fix this. if attr == 'info.meta': if a1 is None: a1 = {} if a2 is None: a2 = {} assert np.all(a1 == a2) # Testing HDF5 table read/write with mixins. This is mostly # copied from FITS mixin testing. el = EarthLocation(x=1 * u.km, y=3 * u.km, z=5 * u.km) el2 = EarthLocation(x=[1, 2] * u.km, y=[3, 4] * u.km, z=[5, 6] * u.km) sc = SkyCoord([1, 2], [3, 4], unit='deg,deg', frame='fk4', obstime='J1990.5') scc = sc.copy() scc.representation = 'cartesian' tm = Time([2450814.5, 2450815.5], format='jd', scale='tai', location=el) mixin_cols = { 'tm': tm, 'dt': TimeDelta([1, 2] * u.day), 'sc': sc, 'scc': scc, 'scd': SkyCoord([1, 2], [3, 4], [5, 6], unit='deg,deg,m', frame='fk4', obstime=['J1990.5', 'J1991.5']), 'q': [1, 2] * u.m, 'lat': Latitude([1, 2] * u.deg), 'lon': Longitude([1, 2] * u.deg, wrap_angle=180. 
* u.deg), 'ang': Angle([1, 2] * u.deg), 'el2': el2, } time_attrs = ['value', 'shape', 'format', 'scale', 'location'] compare_attrs = { 'c1': ['data'], 'c2': ['data'], 'tm': time_attrs, 'dt': ['shape', 'value', 'format', 'scale'], 'sc': ['ra', 'dec', 'representation', 'frame.name'], 'scc': ['x', 'y', 'z', 'representation', 'frame.name'], 'scd': ['ra', 'dec', 'distance', 'representation', 'frame.name'], 'q': ['value', 'unit'], 'lon': ['value', 'unit', 'wrap_angle'], 'lat': ['value', 'unit'], 'ang': ['value', 'unit'], 'el2': ['x', 'y', 'z', 'ellipsoid'], 'nd': ['x', 'y', 'z'], } @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_hdf5_mixins_qtable_to_table(tmpdir): """Test writing as QTable and reading as Table. Ensure correct classes come out. """ filename = str(tmpdir.join('test_simple.hdf5')) names = sorted(mixin_cols) t = QTable([mixin_cols[name] for name in names], names=names) t.write(filename, format='hdf5', path='root', serialize_meta=True) t2 = Table.read(filename, format='hdf5', path='root') assert t.colnames == t2.colnames for name, col in t.columns.items(): col2 = t2[name] # Special-case Time, which does not yet support round-tripping # the format. if isinstance(col2, Time): col2.format = col.format attrs = compare_attrs[name] compare_class = True if isinstance(col.info, QuantityInfo): # Downgrade Quantity to Column + unit assert type(col2) is Column # Class-specific attributes like `value` or `wrap_angle` are lost. attrs = ['unit'] compare_class = False # Compare data values here (assert_objects_equal doesn't know how in this case) assert np.all(col.value == col2) assert_objects_equal(col, col2, attrs, compare_class) @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') @pytest.mark.parametrize('table_cls', (Table, QTable)) def test_hdf5_mixins_as_one(table_cls, tmpdir): """Test write/read all cols at once and validate intermediate column names""" filename = str(tmpdir.join('test_simple.hdf5')) names = sorted(mixin_cols) serialized_names = ['ang', 'dt.jd1', 'dt.jd2', 'el2.x', 'el2.y', 'el2.z', 'lat', 'lon', 'q', 'sc.ra', 'sc.dec', 'scc.x', 'scc.y', 'scc.z', 'scd.ra', 'scd.dec', 'scd.distance', 'scd.obstime.jd1', 'scd.obstime.jd2', 'tm.jd1', 'tm.jd2', ] t = table_cls([mixin_cols[name] for name in names], names=names) t.meta['C'] = 'spam' t.meta['comments'] = ['this', 'is', 'a', 'comment'] t.meta['history'] = ['first', 'second', 'third'] t.write(filename, format="hdf5", path='root', serialize_meta=True) t2 = table_cls.read(filename, format='hdf5', path='root') assert t2.meta['C'] == 'spam' assert t2.meta['comments'] == ['this', 'is', 'a', 'comment'] assert t2.meta['history'] == ['first', 'second', 'third'] assert t.colnames == t2.colnames # Read directly via hdf5 and confirm column names h5 = h5py.File(filename, 'r') assert list(h5['root'].dtype.names) == serialized_names h5.close() @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') @pytest.mark.parametrize('name_col', list(mixin_cols.items())) @pytest.mark.parametrize('table_cls', (Table, QTable)) def test_hdf5_mixins_per_column(table_cls, name_col, tmpdir): """Test write/read one col at a time and do detailed validation""" filename = str(tmpdir.join('test_simple.hdf5')) name, col = name_col c = [1.0, 2.0] t = table_cls([c, col, c], names=['c1', name, 'c2']) t[name].info.description = 'my description' t[name].info.meta = {'list': list(range(50)), 'dict': {'a': 'b' * 200}} if not t.has_mixin_columns: pytest.skip('column is not a mixin (e.g. 
Quantity subclass in Table)') if isinstance(t[name], NdarrayMixin): pytest.xfail('NdarrayMixin not supported') t.write(filename, format="hdf5", path='root', serialize_meta=True) t2 = table_cls.read(filename, format='hdf5', path='root') assert t.colnames == t2.colnames for colname in t.colnames: assert_objects_equal(t[colname], t2[colname], compare_attrs[colname]) # Special case to make sure Column type doesn't leak into Time class data if name.startswith('tm'): assert t2[name]._time.jd1.__class__ is np.ndarray assert t2[name]._time.jd2.__class__ is np.ndarray @pytest.mark.skipif('HAS_YAML or not HAS_H5PY') def test_warn_for_dropped_info_attributes(tmpdir): filename = str(tmpdir.join('test.hdf5')) t = Table([[1, 2]]) t['col0'].info.description = 'hello' with catch_warnings() as warns: t.write(filename, path='root', serialize_meta=False) assert len(warns) == 1 assert str(warns[0].message).startswith( "table contains column(s) with defined 'unit'") @pytest.mark.skipif('HAS_YAML or not HAS_H5PY') def test_error_for_mixins_but_no_yaml(tmpdir): filename = str(tmpdir.join('test.hdf5')) t = Table([mixin_cols['sc']]) with pytest.raises(TypeError) as err: t.write(filename, path='root', serialize_meta=True) assert "cannot write type SkyCoord column 'col0' to HDF5 without PyYAML" in str(err)
funbaker/astropy
astropy/io/misc/tests/test_hdf5.py
astropy/stats/lombscargle/_statistics.py
"""Implements the Astropy TestRunner which is a thin wrapper around py.test.""" import inspect import os import glob import copy import shlex import sys import tempfile import warnings import importlib from collections import OrderedDict from importlib.util import find_spec from ..config.paths import set_temp_config, set_temp_cache from ..utils import wraps, find_current_module from ..utils.exceptions import AstropyWarning, AstropyDeprecationWarning __all__ = ['TestRunner', 'TestRunnerBase', 'keyword'] def _has_test_dependencies(): # pragma: no cover # Using the test runner will not work without these dependencies, but # pytest-openfiles is optional, so it's not listed here. required = ['pytest', 'pytest_remotedata', 'pytest_doctestplus'] for module in required: spec = find_spec(module) # Checking loader accounts for packages that were uninstalled if spec is None or spec.loader is None: return False return True class keyword: """ A decorator to mark a method as keyword argument for the ``TestRunner``. Parameters ---------- default_value : `object` The default value for the keyword argument. (Default: `None`) priority : `int` keyword argument methods are executed in order of descending priority. """ def __init__(self, default_value=None, priority=0): self.default_value = default_value self.priority = priority def __call__(self, f): def keyword(*args, **kwargs): return f(*args, **kwargs) keyword._default_value = self.default_value keyword._priority = self.priority # Set __doc__ explicitly here rather than using wraps because we want # to keep the function name as keyword so we can inspect it later. keyword.__doc__ = f.__doc__ return keyword class TestRunnerBase: """ The base class for the TestRunner. A test runner can be constructed by creating a subclass of this class and defining 'keyword' methods. These are methods that have the `~astropy.tests.runner.keyword` decorator, these methods are used to construct allowed keyword arguments to the `~astropy.tests.runner.TestRunnerBase.run_tests` method as a way to allow customization of individual keyword arguments (and associated logic) without having to re-implement the whole `~astropy.tests.runner.TestRunnerBase.run_tests` method. Examples -------- A simple keyword method:: class MyRunner(TestRunnerBase): @keyword('default_value'): def spam(self, spam, kwargs): \"\"\" spam : `str` The parameter description for the run_tests docstring. \"\"\" # Return value must be a list with a CLI parameter for pytest. return ['--spam={}'.format(spam)] """ def __init__(self, base_path): self.base_path = os.path.abspath(base_path) def __new__(cls, *args, **kwargs): # Before constructing the class parse all the methods that have been # decorated with ``keyword``. # The objective of this method is to construct a default set of keyword # arguments to the ``run_tests`` method. It does this by inspecting the # methods of the class for functions with the name ``keyword`` which is # the name of the decorator wrapping function. Once it has created this # dictionary, it also formats the docstring of ``run_tests`` to be # comprised of the docstrings for the ``keyword`` methods. # To add a keyword argument to the ``run_tests`` method, define a new # method decorated with ``@keyword`` and with the ``self, name, kwargs`` # signature. 
# Get all 'function' members as the wrapped methods are functions functions = inspect.getmembers(cls, predicate=inspect.isfunction) # Filter out anything that's not got the name 'keyword' keywords = filter(lambda func: func[1].__name__ == 'keyword', functions) # Sort all keywords based on the priority flag. sorted_keywords = sorted(keywords, key=lambda x: x[1]._priority, reverse=True) cls.keywords = OrderedDict() doc_keywords = "" for name, func in sorted_keywords: # Here we test if the function has been overloaded to return # NotImplemented which is the way to disable arguments on # subclasses. If it has been disabled we need to remove it from the # default keywords dict. We do it in the try except block because # we do not have access to an instance of the class, so this is # going to error unless the method is just doing `return # NotImplemented`. try: # Second argument is False, as it is normally a bool. # The other two are placeholders for objects. if func(None, False, None) is NotImplemented: continue except Exception: pass # Construct the default kwargs dict and docstring cls.keywords[name] = func._default_value if func.__doc__: doc_keywords += ' '*8 doc_keywords += func.__doc__.strip() doc_keywords += '\n\n' cls.run_tests.__doc__ = cls.RUN_TESTS_DOCSTRING.format(keywords=doc_keywords) return super(TestRunnerBase, cls).__new__(cls) def _generate_args(self, **kwargs): # Update default values with passed kwargs # but don't modify the defaults keywords = copy.deepcopy(self.keywords) keywords.update(kwargs) # Iterate through the keywords (in order of priority) args = [] for keyword in keywords.keys(): func = getattr(self, keyword) result = func(keywords[keyword], keywords) # Allow disabling of options in a subclass if result is NotImplemented: raise TypeError("run_tests() got an unexpected keyword argument {}".format(keyword)) # keyword methods must return a list if not isinstance(result, list): raise TypeError("{} keyword method must return a list".format(keyword)) args += result return args RUN_TESTS_DOCSTRING = \ """ Run the tests for the package. Parameters ---------- {keywords} See Also -------- pytest.main : This method builds arguments for and then calls this function. """ def run_tests(self, **kwargs): # The following option will include eggs inside a .eggs folder in # sys.path when running the tests. This is possible so that when # runnning python setup.py test, test dependencies installed via e.g. # tests_requires are available here. This is not an advertised option # since it is only for internal use if kwargs.pop('add_local_eggs_to_path', False): # Add each egg to sys.path individually for egg in glob.glob(os.path.join('.eggs', '*.egg')): sys.path.insert(0, egg) # We now need to force reload pkg_resources in case any pytest # plugins were added above, so that their entry points are picked up import pkg_resources importlib.reload(pkg_resources) if not _has_test_dependencies(): # pragma: no cover msg = "Test dependencies are missing. You should install the 'pytest-astropy' package." raise RuntimeError(msg) # The docstring for this method is defined as a class variable. # This allows it to be built for each subclass in __new__. 
# Don't import pytest until it's actually needed to run the tests import pytest # Raise error for undefined kwargs allowed_kwargs = set(self.keywords.keys()) passed_kwargs = set(kwargs.keys()) if not passed_kwargs.issubset(allowed_kwargs): wrong_kwargs = list(passed_kwargs.difference(allowed_kwargs)) raise TypeError("run_tests() got an unexpected keyword argument {}".format(wrong_kwargs[0])) args = self._generate_args(**kwargs) if 'plugins' not in self.keywords or self.keywords['plugins'] is None: self.keywords['plugins'] = [] # Make plugins available to test runner without registering them self.keywords['plugins'].extend([ 'astropy.tests.plugins.display', 'astropy.tests.plugins.config' ]) # override the config locations to not make a new directory nor use # existing cache or config astropy_config = tempfile.mkdtemp('astropy_config') astropy_cache = tempfile.mkdtemp('astropy_cache') # Have to use nested with statements for cross-Python support # Note, using these context managers here is superfluous if the # config_dir or cache_dir options to py.test are in use, but it's # also harmless to nest the contexts with set_temp_config(astropy_config, delete=True): with set_temp_cache(astropy_cache, delete=True): return pytest.main(args=args, plugins=self.keywords['plugins']) @classmethod def make_test_runner_in(cls, path): """ Constructs a `TestRunner` to run in the given path, and returns a ``test()`` function which takes the same arguments as `TestRunner.run_tests`. The returned ``test()`` function will be defined in the module this was called from. This is used to implement the ``astropy.test()`` function (or the equivalent for affiliated packages). """ runner = cls(path) @wraps(runner.run_tests, ('__doc__',), exclude_args=('self',)) def test(**kwargs): return runner.run_tests(**kwargs) module = find_current_module(2) if module is not None: test.__module__ = module.__name__ # A somewhat unusual hack, but delete the attached __wrapped__ # attribute--although this is normally used to tell if the function # was wrapped with wraps, on some version of Python this is also # used to determine the signature to display in help() which is # not useful in this case. We don't really care in this case if the # function was wrapped either if hasattr(test, '__wrapped__'): del test.__wrapped__ return test class TestRunner(TestRunnerBase): """ A test runner for astropy tests """ # Increase priority so this warning is displayed first. @keyword(priority=1000) def coverage(self, coverage, kwargs): if coverage: warnings.warn( "The coverage option is ignored on run_tests, since it " "can not be made to work in that context. Use " "'python setup.py test --coverage' instead.", AstropyWarning) return [] # test_path depends on self.package_path so make sure this runs before # test_path. @keyword(priority=1) def package(self, package, kwargs): """ package : str, optional The name of a specific package to test, e.g. 'io.fits' or 'utils'. If nothing is specified all default Astropy tests are run. """ if package is None: self.package_path = self.base_path else: self.package_path = os.path.join(self.base_path, package.replace('.', os.path.sep)) if not os.path.isdir(self.package_path): raise ValueError('Package not found: {0}'.format(package)) if not kwargs['test_path']: return [self.package_path] return [] @keyword() def test_path(self, test_path, kwargs): """ test_path : str, optional Specify location to test by path. May be a single file or directory. Must be specified absolutely or relative to the calling directory. 
""" all_args = [] # Ensure that the package kwarg has been run. self.package(kwargs['package'], kwargs) if test_path: base, ext = os.path.splitext(test_path) if ext in ('.rst', ''): if kwargs['docs_path'] is None: # This shouldn't happen from "python setup.py test" raise ValueError( "Can not test .rst files without a docs_path " "specified.") abs_docs_path = os.path.abspath(kwargs['docs_path']) abs_test_path = os.path.abspath( os.path.join(abs_docs_path, os.pardir, test_path)) common = os.path.commonprefix((abs_docs_path, abs_test_path)) if os.path.exists(abs_test_path) and common == abs_docs_path: # Turn on the doctest_rst plugin all_args.append('--doctest-rst') test_path = abs_test_path if not (os.path.isdir(test_path) or ext in ('.py', '.rst')): raise ValueError("Test path must be a directory or a path to " "a .py or .rst file") return all_args + [test_path] return [] @keyword() def args(self, args, kwargs): """ args : str, optional Additional arguments to be passed to ``pytest.main`` in the ``args`` keyword argument. """ if args: return shlex.split(args, posix=not sys.platform.startswith('win')) return [] @keyword() def plugins(self, plugins, kwargs): """ plugins : list, optional Plugins to be passed to ``pytest.main`` in the ``plugins`` keyword argument. """ return [] @keyword() def verbose(self, verbose, kwargs): """ verbose : bool, optional Convenience option to turn on verbose output from py.test. Passing True is the same as specifying ``-v`` in ``args``. """ if verbose: return ['-v'] return [] @keyword() def pastebin(self, pastebin, kwargs): """ pastebin : ('failed', 'all', None), optional Convenience option for turning on py.test pastebin output. Set to 'failed' to upload info for failed tests, or 'all' to upload info for all tests. """ if pastebin is not None: if pastebin in ['failed', 'all']: return ['--pastebin={0}'.format(pastebin)] else: raise ValueError("pastebin should be 'failed' or 'all'") return [] @keyword(default_value='none') def remote_data(self, remote_data, kwargs): """ remote_data : {'none', 'astropy', 'any'}, optional Controls whether to run tests marked with @pytest.mark.remote_data. This can be set to run no tests with remote data (``none``), only ones that use data from http://data.astropy.org (``astropy``), or all tests that use remote data (``any``). The default is ``none``. """ if remote_data is True: remote_data = 'any' elif remote_data is False: remote_data = 'none' elif remote_data not in ('none', 'astropy', 'any'): warnings.warn("The remote_data option should be one of " "none/astropy/any (found {0}). For backward-compatibility, " "assuming 'any', but you should change the option to be " "one of the supported ones to avoid issues in " "future.".format(remote_data), AstropyDeprecationWarning) remote_data = 'any' return ['--remote-data={0}'.format(remote_data)] @keyword() def pep8(self, pep8, kwargs): """ pep8 : bool, optional Turn on PEP8 checking via the pytest-pep8 plugin and disable normal tests. Same as specifying ``--pep8 -k pep8`` in ``args``. """ if pep8: try: import pytest_pep8 # pylint: disable=W0611 except ImportError: raise ImportError('PEP8 checking requires pytest-pep8 plugin: ' 'http://pypi.python.org/pypi/pytest-pep8') else: return ['--pep8', '-k', 'pep8'] return [] @keyword() def pdb(self, pdb, kwargs): """ pdb : bool, optional Turn on PDB post-mortem analysis for failing tests. Same as specifying ``--pdb`` in ``args``. 
""" if pdb: return ['--pdb'] return [] @keyword() def open_files(self, open_files, kwargs): """ open_files : bool, optional Fail when any tests leave files open. Off by default, because this adds extra run time to the test suite. Requires the ``psutil`` package. """ if open_files: if kwargs['parallel'] != 0: raise SystemError( "open file detection may not be used in conjunction with " "parallel testing.") try: import psutil # pylint: disable=W0611 except ImportError: raise SystemError( "open file detection requested, but psutil package " "is not installed.") return ['--open-files'] print("Checking for unclosed files") return [] @keyword(0) def parallel(self, parallel, kwargs): """ parallel : int, optional When provided, run the tests in parallel on the specified number of CPUs. If parallel is negative, it will use the all the cores on the machine. Requires the ``pytest-xdist`` plugin. """ if parallel != 0: try: from xdist import plugin # noqa except ImportError: raise SystemError( "running tests in parallel requires the pytest-xdist package") return ['-n', str(parallel)] return [] @keyword() def docs_path(self, docs_path, kwargs): """ docs_path : str, optional The path to the documentation .rst files. """ if docs_path is not None and not kwargs['skip_docs']: if kwargs['package'] is not None: docs_path = os.path.join( docs_path, kwargs['package'].replace('.', os.path.sep)) if not os.path.exists(docs_path): warnings.warn( "Can not test .rst docs, since docs path " "({0}) does not exist.".format(docs_path)) docs_path = None if docs_path and not kwargs['skip_docs'] and not kwargs['test_path']: return [docs_path, '--doctest-rst'] return [] @keyword() def skip_docs(self, skip_docs, kwargs): """ skip_docs : `bool`, optional When `True`, skips running the doctests in the .rst files. """ # Skip docs is a bool used by docs_path only. return [] @keyword() def repeat(self, repeat, kwargs): """ repeat : `int`, optional If set, specifies how many times each test should be run. This is useful for diagnosing sporadic failures. """ if repeat: return ['--repeat={0}'.format(repeat)] return [] # Override run_tests for astropy-specific fixes def run_tests(self, **kwargs): # This prevents cyclical import problems that make it # impossible to test packages that define Table types on their # own. from ..table import Table # pylint: disable=W0611 return super(TestRunner, self).run_tests(**kwargs)
funbaker/astropy
astropy/io/misc/tests/test_hdf5.py
astropy/tests/runner.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This is a collection of monkey patches and workarounds for bugs in
earlier versions of Numpy.
"""
from ...utils import minversion

__all__ = ['NUMPY_LT_1_10_4', 'NUMPY_LT_1_11', 'NUMPY_LT_1_11_2',
           'NUMPY_LT_1_12', 'NUMPY_LT_1_13', 'NUMPY_LT_1_14']

# TODO: It might also be nice to have aliases to these named for specific
# features/bugs we're checking for (ex:
# astropy.table.table._BROKEN_UNICODE_TABLE_SORT)
NUMPY_LT_1_10_4 = not minversion('numpy', '1.10.4')
NUMPY_LT_1_11 = not minversion('numpy', '1.11.0')
NUMPY_LT_1_11_2 = not minversion('numpy', '1.11.2')
NUMPY_LT_1_12 = not minversion('numpy', '1.12')
NUMPY_LT_1_13 = not minversion('numpy', '1.13')
NUMPY_LT_1_14 = not minversion('numpy', '1.14dev')
'r') as input_file: t2 = Table.read(input_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_filobj_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='path/to/data/the_table') import h5py with h5py.File(test_file, 'r') as input_file: t2 = Table.read(input_file, path='path/to/data/the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_filobj_group_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(test_file, path='path/to/data/the_table') import h5py with h5py.File(test_file, 'r') as input_file: t2 = Table.read(input_file['path/to'], path='data/the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_read_wrong_fileobj(): class FakeFile: def read(self): pass f = FakeFile() with pytest.raises(TypeError) as exc: t1 = Table.read(f, format='hdf5') assert exc.value.args[0] == 'h5py can only open regular files' @pytest.mark.skipif('not HAS_H5PY') def test_write_fileobj(tmpdir): test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='the_table') t2 = Table.read(test_file, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_write_filobj_group(tmpdir): test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='path/to/data/the_table') t2 = Table.read(test_file, path='path/to/data/the_table') assert np.all(t2['a'] == [1, 2, 3]) @pytest.mark.skipif('not HAS_H5PY') def test_write_wrong_type(): t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) with pytest.raises(TypeError) as exc: t1.write(1212, path='path/to/data/the_table', format='hdf5') assert exc.value.args[0] == ('output should be a string ' 'or an h5py File or Group object') @pytest.mark.skipif('not HAS_H5PY') @pytest.mark.parametrize(('dtype'), ALL_DTYPES) def test_preserve_single_dtypes(tmpdir, dtype): test_file = str(tmpdir.join('test.hdf5')) values = _default_values(dtype) t1 = Table() t1.add_column(Column(name='a', data=np.array(values, dtype=dtype))) t1.write(test_file, path='the_table') t2 = Table.read(test_file, path='the_table') assert np.all(t2['a'] == values) assert t2['a'].dtype == dtype @pytest.mark.skipif('not HAS_H5PY') def test_preserve_all_dtypes(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() for dtype in ALL_DTYPES: values = _default_values(dtype) t1.add_column(Column(name=str(dtype), data=np.array(values, dtype=dtype))) t1.write(test_file, path='the_table') t2 = Table.read(test_file, path='the_table') for dtype in ALL_DTYPES: values = _default_values(dtype) assert np.all(t2[str(dtype)] == values) assert t2[str(dtype)].dtype == dtype @pytest.mark.skipif('not HAS_H5PY') def test_preserve_meta(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['a'] = 1 t1.meta['b'] = 'hello' t1.meta['c'] = 3.14159 t1.meta['d'] = True t1.meta['e'] = np.array([1, 2, 3]) t1.write(test_file, path='the_table') t2 = Table.read(test_file, path='the_table') for key in t1.meta: assert np.all(t1.meta[key] == t2.meta[key]) @pytest.mark.skipif('not HAS_H5PY 
or not HAS_YAML') def test_preserve_serialized(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True) t2 = Table.read(test_file, path='the_table') assert t1['a'].unit == t2['a'].unit assert t1['a'].format == t2['a'].format assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_preserve_serialized_compatibility_mode(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} with catch_warnings() as w: t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True, compatibility_mode=True) assert str(w[0].message).startswith( "compatibility mode for writing is deprecated") t2 = Table.read(test_file, path='the_table') assert t1['a'].unit == t2['a'].unit assert t1['a'].format == t2['a'].format assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_preserve_serialized_in_complicated_path(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} t1.write(test_file, path='the_table/complicated/path', serialize_meta=True, overwrite=True) t2 = Table.read(test_file, path='the_table/complicated/path') assert t1['a'].format == t2['a'].format assert t1['a'].unit == t2['a'].unit assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_metadata_very_large(tmpdir): """Test that very large datasets work, now!""" test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3], unit="s") t1['a'].meta['a0'] = "A0" t1['a'].meta['a1'] = {"a1": [0, 1]} t1['a'].format = '7.3f' t1['a'].description = 'A column' t1.meta['b'] = 1 t1.meta['c'] = {"c0": [0, 1]} t1.meta["meta_big"] = "0" * (2 ** 16 + 1) t1.meta["meta_biggerstill"] = "0" * (2 ** 18) t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True) t2 = Table.read(test_file, path='the_table') assert t1['a'].unit == t2['a'].unit assert t1['a'].format == t2['a'].format assert t1['a'].description == t2['a'].description assert t1['a'].meta == t2['a'].meta assert t1.meta == t2.meta @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_metadata_very_large_fails_compatibility_mode(tmpdir): """Test that very large metadata do not work in compatibility mode.""" test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1['a'] = Column(data=[1, 2, 3]) t1.meta["meta"] = "0" * (2 ** 16 + 1) with catch_warnings() as w: t1.write(test_file, path='the_table', serialize_meta=True, overwrite=True, compatibility_mode=True) assert len(w) == 2 # Error message slightly changed in h5py 2.7.1, thus the 2part assert assert str(w[1].message).startswith( "Attributes could not be 
written to the output HDF5 " "file: Unable to create attribute ") assert "bject header message is too large" in str(w[1].message) @pytest.mark.skipif('not HAS_H5PY') def test_skip_meta(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['a'] = 1 t1.meta['b'] = 'hello' t1.meta['c'] = 3.14159 t1.meta['d'] = True t1.meta['e'] = np.array([1, 2, 3]) t1.meta['f'] = str with catch_warnings() as w: t1.write(test_file, path='the_table') assert len(w) == 1 assert str(w[0].message).startswith( "Attribute `f` of type {0} cannot be written to HDF5 files - skipping".format(type(t1.meta['f']))) @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_fail_meta_serialize(tmpdir): test_file = str(tmpdir.join('test.hdf5')) t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.meta['f'] = str with pytest.raises(Exception) as err: t1.write(test_file, path='the_table', serialize_meta=True) assert "cannot represent an object: <class 'str'>" in str(err) @pytest.mark.skipif('not HAS_H5PY') def test_read_h5py_objects(tmpdir): # Regression test - ensure that Datasets are recognized automatically test_file = str(tmpdir.join('test.hdf5')) import h5py with h5py.File(test_file, 'w') as output_file: t1 = Table() t1.add_column(Column(name='a', data=[1, 2, 3])) t1.write(output_file, path='the_table') f = h5py.File(test_file) t2 = Table.read(f, path='the_table') assert np.all(t2['a'] == [1, 2, 3]) t3 = Table.read(f['/'], path='the_table') assert np.all(t3['a'] == [1, 2, 3]) t4 = Table.read(f['the_table']) assert np.all(t4['a'] == [1, 2, 3]) f.close() # don't raise an error in 'test --open-files' def assert_objects_equal(obj1, obj2, attrs, compare_class=True): if compare_class: assert obj1.__class__ is obj2.__class__ info_attrs = ['info.name', 'info.format', 'info.unit', 'info.description', 'info.meta'] for attr in attrs + info_attrs: a1 = obj1 a2 = obj2 for subattr in attr.split('.'): try: a1 = getattr(a1, subattr) a2 = getattr(a2, subattr) except AttributeError: a1 = a1[subattr] a2 = a2[subattr] # Mixin info.meta can be None instead of an empty OrderedDict(); #6720 would # fix this. if attr == 'info.meta': if a1 is None: a1 = {} if a2 is None: a2 = {} assert np.all(a1 == a2) # Testing HDF5 table read/write with mixins. This is mostly # copied from FITS mixin testing. el = EarthLocation(x=1 * u.km, y=3 * u.km, z=5 * u.km) el2 = EarthLocation(x=[1, 2] * u.km, y=[3, 4] * u.km, z=[5, 6] * u.km) sc = SkyCoord([1, 2], [3, 4], unit='deg,deg', frame='fk4', obstime='J1990.5') scc = sc.copy() scc.representation = 'cartesian' tm = Time([2450814.5, 2450815.5], format='jd', scale='tai', location=el) mixin_cols = { 'tm': tm, 'dt': TimeDelta([1, 2] * u.day), 'sc': sc, 'scc': scc, 'scd': SkyCoord([1, 2], [3, 4], [5, 6], unit='deg,deg,m', frame='fk4', obstime=['J1990.5', 'J1991.5']), 'q': [1, 2] * u.m, 'lat': Latitude([1, 2] * u.deg), 'lon': Longitude([1, 2] * u.deg, wrap_angle=180.
* u.deg), 'ang': Angle([1, 2] * u.deg), 'el2': el2, } time_attrs = ['value', 'shape', 'format', 'scale', 'location'] compare_attrs = { 'c1': ['data'], 'c2': ['data'], 'tm': time_attrs, 'dt': ['shape', 'value', 'format', 'scale'], 'sc': ['ra', 'dec', 'representation', 'frame.name'], 'scc': ['x', 'y', 'z', 'representation', 'frame.name'], 'scd': ['ra', 'dec', 'distance', 'representation', 'frame.name'], 'q': ['value', 'unit'], 'lon': ['value', 'unit', 'wrap_angle'], 'lat': ['value', 'unit'], 'ang': ['value', 'unit'], 'el2': ['x', 'y', 'z', 'ellipsoid'], 'nd': ['x', 'y', 'z'], } @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') def test_hdf5_mixins_qtable_to_table(tmpdir): """Test writing as QTable and reading as Table. Ensure correct classes come out. """ filename = str(tmpdir.join('test_simple.hdf5')) names = sorted(mixin_cols) t = QTable([mixin_cols[name] for name in names], names=names) t.write(filename, format='hdf5', path='root', serialize_meta=True) t2 = Table.read(filename, format='hdf5', path='root') assert t.colnames == t2.colnames for name, col in t.columns.items(): col2 = t2[name] # Special-case Time, which does not yet support round-tripping # the format. if isinstance(col2, Time): col2.format = col.format attrs = compare_attrs[name] compare_class = True if isinstance(col.info, QuantityInfo): # Downgrade Quantity to Column + unit assert type(col2) is Column # Class-specific attributes like `value` or `wrap_angle` are lost. attrs = ['unit'] compare_class = False # Compare data values here (assert_objects_equal doesn't know how in this case) assert np.all(col.value == col2) assert_objects_equal(col, col2, attrs, compare_class) @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') @pytest.mark.parametrize('table_cls', (Table, QTable)) def test_hdf5_mixins_as_one(table_cls, tmpdir): """Test write/read all cols at once and validate intermediate column names""" filename = str(tmpdir.join('test_simple.hdf5')) names = sorted(mixin_cols) serialized_names = ['ang', 'dt.jd1', 'dt.jd2', 'el2.x', 'el2.y', 'el2.z', 'lat', 'lon', 'q', 'sc.ra', 'sc.dec', 'scc.x', 'scc.y', 'scc.z', 'scd.ra', 'scd.dec', 'scd.distance', 'scd.obstime.jd1', 'scd.obstime.jd2', 'tm.jd1', 'tm.jd2', ] t = table_cls([mixin_cols[name] for name in names], names=names) t.meta['C'] = 'spam' t.meta['comments'] = ['this', 'is', 'a', 'comment'] t.meta['history'] = ['first', 'second', 'third'] t.write(filename, format="hdf5", path='root', serialize_meta=True) t2 = table_cls.read(filename, format='hdf5', path='root') assert t2.meta['C'] == 'spam' assert t2.meta['comments'] == ['this', 'is', 'a', 'comment'] assert t2.meta['history'] == ['first', 'second', 'third'] assert t.colnames == t2.colnames # Read directly via hdf5 and confirm column names h5 = h5py.File(filename, 'r') assert list(h5['root'].dtype.names) == serialized_names h5.close() @pytest.mark.skipif('not HAS_H5PY or not HAS_YAML') @pytest.mark.parametrize('name_col', list(mixin_cols.items())) @pytest.mark.parametrize('table_cls', (Table, QTable)) def test_hdf5_mixins_per_column(table_cls, name_col, tmpdir): """Test write/read one col at a time and do detailed validation""" filename = str(tmpdir.join('test_simple.hdf5')) name, col = name_col c = [1.0, 2.0] t = table_cls([c, col, c], names=['c1', name, 'c2']) t[name].info.description = 'my description' t[name].info.meta = {'list': list(range(50)), 'dict': {'a': 'b' * 200}} if not t.has_mixin_columns: pytest.skip('column is not a mixin (e.g. 
Quantity subclass in Table)') if isinstance(t[name], NdarrayMixin): pytest.xfail('NdarrayMixin not supported') t.write(filename, format="hdf5", path='root', serialize_meta=True) t2 = table_cls.read(filename, format='hdf5', path='root') assert t.colnames == t2.colnames for colname in t.colnames: assert_objects_equal(t[colname], t2[colname], compare_attrs[colname]) # Special case to make sure Column type doesn't leak into Time class data if name.startswith('tm'): assert t2[name]._time.jd1.__class__ is np.ndarray assert t2[name]._time.jd2.__class__ is np.ndarray @pytest.mark.skipif('HAS_YAML or not HAS_H5PY') def test_warn_for_dropped_info_attributes(tmpdir): filename = str(tmpdir.join('test.hdf5')) t = Table([[1, 2]]) t['col0'].info.description = 'hello' with catch_warnings() as warns: t.write(filename, path='root', serialize_meta=False) assert len(warns) == 1 assert str(warns[0].message).startswith( "table contains column(s) with defined 'unit'") @pytest.mark.skipif('HAS_YAML or not HAS_H5PY') def test_error_for_mixins_but_no_yaml(tmpdir): filename = str(tmpdir.join('test.hdf5')) t = Table([mixin_cols['sc']]) with pytest.raises(TypeError) as err: t.write(filename, path='root', serialize_meta=True) assert "cannot write type SkyCoord column 'col0' to HDF5 without PyYAML" in str(err)
funbaker/astropy
astropy/io/misc/tests/test_hdf5.py
astropy/utils/compat/numpycompat.py
import json import logging from django.core.exceptions import ValidationError, ObjectDoesNotExist from django.core.validators import URLValidator from django.utils.datetime_safe import datetime from app.ciscoeox.exception import ConnectionFailedException, CiscoApiCallFailed from app.ciscoeox.base_api import CiscoEoxApi from app.config.settings import AppSettings from app.productdb.models import Product, Vendor, ProductMigrationSource, ProductMigrationOption logger = logging.getLogger("productdb") def convert_time_format(date_format): """ helper function to convert the date format string that is used by the Cisco EoX API to a Python strptime format string :param date_format: date format string from the API response (e.g. "YYYY-MM-DD") :return: Python strptime format string """ if date_format == "YYYY-MM-DD": return "%Y-%m-%d" # fall back to the default format for unknown values return "%Y-%m-%d" def clean_api_url_response(url_response): """ clean the string to a valid URL field (used with API data, because sometimes multiple URLs or additional text are stuffed into a single value) """ clean_response = url_response.strip() if url_response != "": clean_response = clean_response if ";" not in clean_response else clean_response.split(";")[0] clean_response = clean_response if " or http://" not in clean_response \ else clean_response.split(" or http://")[0] clean_response = clean_response if " and http://" not in clean_response \ else clean_response.split(" and http://")[0] clean_response = clean_response if " http://" not in clean_response \ else clean_response.split(" http://")[0] clean_response = clean_response if " and https://" not in clean_response \ else clean_response.split(" and https://")[0] clean_response = clean_response if " or https://" not in clean_response \ else clean_response.split(" or https://")[0] clean_response = clean_response if " https://" not in clean_response \ else clean_response.split(" https://")[0] return clean_response def update_local_db_based_on_record(eox_record, create_missing=False): """ update a database entry based on an EoX record provided by the Cisco EoX API :param eox_record: JSON data from the Cisco EoX API :param create_missing: set to True, if the product should be created if it's not part of the local database :return: returns an error message or None if successful """ pid = eox_record['EOLProductID'] # only used with Cisco Products v = Vendor.objects.get(name="Cisco Systems") if create_missing: product, created = Product.objects.get_or_create( product_id=pid, vendor=v ) else: try: product = Product.objects.get( product_id=pid, vendor=v ) created = False except ObjectDoesNotExist: logger.debug("%15s: Product not found in database (create disabled)" % pid, exc_info=True) return None if created: product.product_id = pid product.description = eox_record['ProductIDDescription'] # it is a Cisco API and the vendors are predefined in the database product.vendor = v logger.debug("%15s: Product created" % pid) # update the lifecycle information try: logger.debug("%15s: update product lifecycle values" % pid) # save datetime values from Cisco EoX API record value_map = { # <API value> : <class attribute> "UpdatedTimeStamp": "eox_update_time_stamp", "EndOfSaleDate": "end_of_sale_date", "LastDateOfSupport": "end_of_support_date", "EOXExternalAnnouncementDate": "eol_ext_announcement_date", "EndOfSWMaintenanceReleases": "end_of_sw_maintenance_date", "EndOfRoutineFailureAnalysisDate": "end_of_routine_failure_analysis", "EndOfServiceContractRenewal": "end_of_service_contract_renewal", "EndOfSvcAttachDate": "end_of_new_service_attachment_date", "EndOfSecurityVulSupportDate": "end_of_sec_vuln_supp_date", } for key in value_map.keys(): if eox_record.get(key, None): value =
eox_record[key].get("value", None) value = value.strip() if value else "" if value != "": setattr( product, value_map[key], datetime.strptime( value, convert_time_format(eox_record[key].get("dateFormat", "%Y-%m-%d")) ).date() ) else: # required if date is removed after an earlier sync setattr( product, value_map[key], None ) # save string values from Cisco EoX API record if "LinkToProductBulletinURL" in eox_record.keys(): value = clean_api_url_response(eox_record.get('LinkToProductBulletinURL', "")) if value != "": val = URLValidator() try: val(value) product.eol_reference_url = value except ValidationError: raise Exception("invalid EoL reference URL") if "ProductBulletinNumber" in eox_record.keys(): product.eol_reference_number = eox_record.get('ProductBulletinNumber', "EoL bulletin") product.save() except Exception as ex: if created: # remove the new (incomplete) entry from the database product.delete() logger.error("%15s: Product Data update failed." % pid, exc_info=True) logger.debug("%15s: DataSet with exception\n%s" % (pid, json.dumps(eox_record, indent=4))) return "Product Data update failed: %s" % str(ex) # save migration information if defined if "EOXMigrationDetails" in eox_record: migration_details = eox_record["EOXMigrationDetails"] product_migration_source, created = ProductMigrationSource.objects.get_or_create( name="Cisco EoX Migration option" ) if created: product_migration_source.description = "Migration option suggested by the Cisco EoX API." product_migration_source.save() if "MigrationOption" in migration_details: candidate_replacement_pid = migration_details["MigrationProductId"].strip() if candidate_replacement_pid == pid: logger.error("Product ID '%s' should be replaced by itself, which is not possible" % pid) else: # only a single migration option per migration source is allowed pmo, _ = ProductMigrationOption.objects.get_or_create(product=product, migration_source=product_migration_source) if migration_details["MigrationOption"] == "Enter PID(s)": # product replacement available, add replacement PID pmo.replacement_product_id = candidate_replacement_pid pmo.migration_product_info_url = clean_api_url_response(migration_details["MigrationProductInfoURL"]) elif migration_details["MigrationOption"] == "See Migration Section" or \ migration_details["MigrationOption"] == "Enter Product Name(s)": # complex product migration, only add comment mig_strat = migration_details["MigrationStrategy"].strip() pmo.comment = mig_strat if mig_strat != "" else migration_details["MigrationProductName"].strip() pmo.migration_product_info_url = clean_api_url_response(migration_details["MigrationProductInfoURL"]) else: # no replacement available, only add comment pmo.comment = migration_details["MigrationOption"].strip() # some data separated by blank pmo.migration_product_info_url = clean_api_url_response(migration_details["MigrationProductInfoURL"]) # add message if only a single entry was saved if pmo.migration_product_info_url != migration_details["MigrationProductInfoURL"].strip(): return "Multiple URL values from the Migration Note received, only the first one is saved" pmo.save() def get_raw_api_data(api_query=None, year=None): """ returns all EoX records for a specific query (from all pages) :param api_query: single query that is send to the Cisco EoX API :param year: get all EoX data that are announced in a specific year :raises CiscoApiCallFailed: exception raised if Cisco EoX API call failed :return: list that contains all EoX records from the Cisco EoX API """ if api_query is 
None and year is None: raise ValueError("either year or api_query must be provided") if api_query: if type(api_query) is not str: raise ValueError("api_query must be a string value") if year: if type(year) is not int: raise ValueError("year must be an integer value") # load application settings and check that the API is enabled app_settings = AppSettings() if not app_settings.is_cisco_api_enabled(): msg = "Cisco API access not enabled" logger.warning(msg) raise CiscoApiCallFailed(msg) # start Cisco EoX API query logger.info("send query to Cisco EoX database: %s" % api_query) eoxapi = CiscoEoxApi() eoxapi.load_client_credentials() results = [] try: current_page = 1 result_pages = 999 while current_page <= result_pages: logger.info("Executing API query %s on page %d" % ('%s' % api_query if api_query else "for year %d" % year, current_page)) # will raise a CiscoApiCallFailed exception on error if year: eoxapi.query_year(year_to_query=year, page=current_page) else: eoxapi.query_product(product_id=api_query, page=current_page) result_pages = eoxapi.amount_of_pages() if eoxapi.get_page_record_count() > 0: results.extend(eoxapi.get_eox_records()) current_page += 1 except ConnectionFailedException: logger.error("Query failed, server not reachable: %s" % api_query, exc_info=True) raise except CiscoApiCallFailed: logger.fatal("Query failed: %s" % api_query, exc_info=True) raise logger.debug("found %d records for year %s" % (len(results), year)) return results
""" Test suite for the ciscoeox.tasks module """ import datetime import pytest import os import requests from requests import Response from app.ciscoeox import tasks from app.ciscoeox.exception import CiscoApiCallFailed, CredentialsNotFoundException from app.config import utils from app.productdb import models as productdb_models from app.config.models import NotificationMessage from app.config.settings import AppSettings from app.productdb.models import Product, Vendor import app.ciscoeox.api_crawler as cisco_eox_api_crawler from django_project.celery import TaskState pytestmark = pytest.mark.django_db @pytest.fixture def use_test_api_configuration(): app = AppSettings() app.set_cisco_api_enabled(True) app.set_product_blacklist_regex("") app.set_cisco_eox_api_queries("") app.set_auto_create_new_products(True) app.set_periodic_sync_enabled(False) app.set_cisco_api_client_id(os.getenv("TEST_CISCO_API_CLIENT_ID", "dummy_id")) app.set_cisco_api_client_secret(os.getenv("TEST_CISCO_API_CLIENT_SECRET", "dummy_secret")) @pytest.mark.usefixtures("mock_cisco_api_authentication_server") @pytest.mark.usefixtures("use_test_api_configuration") @pytest.mark.usefixtures("set_celery_always_eager") @pytest.mark.usefixtures("redis_server_required") @pytest.mark.usefixtures("import_default_vendors") class TestExecuteTaskToSynchronizeCiscoEoxStateTask: def mock_api_call(self, monkeypatch): # mock the underlying API call class MockSession: def get(self, *args, **kwargs): r = Response() r.status_code = 200 with open("app/ciscoeox/tests/data/cisco_eox_response_page_1_of_1.json") as f: r._content = f.read().encode("utf-8") return r monkeypatch.setattr(requests, "Session", MockSession) def test_manual_task(self, monkeypatch): self.mock_api_call(monkeypatch) # try to execute it, while no auto-sync is enabled app = AppSettings() app.set_periodic_sync_enabled(False) app.set_cisco_eox_api_queries("WS-C2960-*") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' \ 'success)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" assert Product.objects.count() == 3, "Three products are part of the update" # test update required p = Product.objects.get(product_id="WS-C2950G-24-EI") p.eox_update_time_stamp = datetime.date(1999, 1, 1) p.save() task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' \ 'success)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert NotificationMessage.objects.count() == 2, "Task should create a Notification Message" assert Product.objects.count() == 3, "Three products are part of the update" def test_manual_task_with_single_blacklist_entry(self, monkeypatch): self.mock_api_call(monkeypatch) # try to execute it, while no auto-sync is enabled app = AppSettings() app.set_periodic_sync_enabled(False) 
app.set_cisco_eox_api_queries("WS-C2960-*") app.set_product_blacklist_regex("WS-C2950G-24-EI") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' \ 'success)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" assert Product.objects.count() == 2 nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_SUCCESS, "Incomplete configuration, should throw a warning " def test_manual_task_with_multiple_blacklist_entries(self, monkeypatch): self.mock_api_call(monkeypatch) # try to execute it, while no auto-sync is enabled app = AppSettings() app.set_periodic_sync_enabled(False) app.set_cisco_eox_api_queries("WS-C2960-*") app.set_product_blacklist_regex("WS-C2950G-48-EI-WS;WS-C2950G-24-EI") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = [ '<p style="text-align: left;">The following queries were executed:<br>' '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' 'success)</li></ul>' ] assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS for er in expected_result: assert er in task.info.get("status_message") assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" assert Product.objects.count() == 1, "Only a single product is imported" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_SUCCESS, "Incomplete configuration, should throw a warning " def test_periodic_task_without_queries(self, monkeypatch): self.mock_api_call(monkeypatch) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == "No Cisco EoX API queries configured." 
assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_WARNING, "Incomplete configuration, should throw a warning " \ "message" def test_api_call_error(self, monkeypatch): # force API failure class MockSession: def get(self, *args, **kwargs): raise CiscoApiCallFailed("The API is broken") monkeypatch.setattr(requests, "Session", MockSession) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;">' \ '<li class="text-danger"><code>yxcz</code> (failed, cannot contact API endpoint ' \ 'at https://api.cisco.com/supporttools/eox/rest/5/EOXByProductID/1/yxcz)</li>' \ '</ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" def test_credentials_not_found(self, monkeypatch): # force API failure class MockSession: def get(self, *args, **kwargs): raise CredentialsNotFoundException("Something is wrong with the credentials handling") monkeypatch.setattr(requests, "Session", MockSession) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li class="text-danger"><code>yxcz</code> ' \ '(failed, cannot contact API endpoint at ' \ 'https://api.cisco.com/supporttools/eox/rest/5/EOXByProductID/1/yxcz)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" def test_api_check_failed(self, monkeypatch): # force API failure class MockSession: def get(self, *args, **kwargs): raise Exception("The API is broken") monkeypatch.setattr(requests, "Session", MockSession) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = "<p style=\"text-align: left;\">The following queries were executed:<br>" \ "<ul style=\"text-align: left;\"><li class=\"text-danger\"><code>yxcz</code> " \ "(failed, cannot contact API endpoint at " \ "https://api.cisco.com/supporttools/eox/rest/5/EOXByProductID/1/yxcz)</li></ul></p>" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = 
NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" def test_periodic_task_enabled_state(self, monkeypatch): self.mock_api_call(monkeypatch) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(False) task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == "task not enabled" app.set_periodic_sync_enabled(True) task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") != "task not enabled" def test_execute_task_to_synchronize_cisco_eox_states_with_failed_api_query(self, monkeypatch): def raise_ciscoapicallfailed(): raise CiscoApiCallFailed("Cisco API call failed message") monkeypatch.setattr(utils, "check_cisco_eox_api_access", lambda x, y, z: True) monkeypatch.setattr( cisco_eox_api_crawler, "get_raw_api_data", lambda api_query=None, year=None: raise_ciscoapicallfailed() ) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = "<p style=\"text-align: left;\">The following queries were executed:<br>" \ "<ul style=\"text-align: left;\"><li class=\"text-danger\">" \ "<code>yxcz</code> (failed, Cisco API call failed message)</li></ul></p>" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" @pytest.mark.usefixtures("set_celery_always_eager") @pytest.mark.usefixtures("redis_server_required") class TestPopulateProductLifecycleStateSyncTask: def test_no_cisco_vendor(self): result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() assert result.status == "SUCCESS" assert result.info == {"error": "Vendor \"Cisco Systems\" not found in database"} @pytest.mark.usefixtures("import_default_vendors") def test_no_cisco_products(self): result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() assert result.status == "SUCCESS" assert result.info == {"error": "No Products associated to \"Cisco Systems\" found in database"} @pytest.mark.usefixtures("import_default_vendors") def test_populate_flag_on_cisco_products(self): app_config = AppSettings() v = Vendor.objects.get(id=1) productdb_models.Product.objects.create(product_id="est", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="Test", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="TestA", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="TestB", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="TestC", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="ControlItem", vendor=v, lc_state_sync=False) # the following item is part of every query, but is never synced because of the wrong vendor 
productdb_models.Product.objects.create(product_id="Other ControlItem", lc_state_sync=False) app_config.set_cisco_eox_api_queries("") app_config.set_periodic_sync_enabled(True) result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() assert result.status == "SUCCESS" assert Product.objects.filter(lc_state_sync=True).count() == 0, "No queries configured" app_config.set_cisco_eox_api_queries("Test\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == ["Test"] app_config.set_cisco_eox_api_queries("Test*\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "Test", "TestA", "TestB", "TestC", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*estB\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "TestB", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*estB\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "TestB", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*es*\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "est", "Test", "TestA", "TestB", "TestC", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*es*\nOther ControlItem") app_config.set_periodic_sync_enabled(False) result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) assert result.status == "SUCCESS" assert filterquery.count() == 0, "Periodic sync disabled, no value should be true" @pytest.mark.usefixtures("mock_cisco_api_authentication_server") @pytest.mark.usefixtures("use_test_api_configuration") @pytest.mark.usefixtures("set_celery_always_eager") @pytest.mark.usefixtures("redis_server_required") @pytest.mark.usefixtures("import_default_vendors") class TestInitialSyncWithCiscoEoXApiTask: def mock_api_call(self, monkeypatch): # mock the underlying API call class MockSession: def get(self, *args, **kwargs): r = Response() r.status_code = 200 with open("app/ciscoeox/tests/data/cisco_eox_response_page_1_of_1.json") as f: r._content = f.read().encode("utf-8") return r monkeypatch.setattr(requests, "Session", MockSession) def test_initial_import_with_invalid_parameters(self): with pytest.raises(AttributeError) as ex: tasks.initial_sync_with_cisco_eox_api.delay(years_list="invalid parameter") assert ex.match("years_list must be a list") with pytest.raises(AttributeError) as ex: tasks.initial_sync_with_cisco_eox_api.delay(years_list=["12", "13"]) assert ex.match("years_list 
must be a list of integers") task = tasks.initial_sync_with_cisco_eox_api.delay(years_list=[]) assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == "No years provided, nothing to do." def test_initial_import(self, monkeypatch): self.mock_api_call(monkeypatch) # start initial import task = tasks.initial_sync_with_cisco_eox_api.delay(years_list=[2018, 2017]) expected_result = "The EoX data were successfully downloaded for the following years: 2018,2017" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert Product.objects.count() == 3, "Three products are part of the update (mocked)" assert NotificationMessage.objects.filter(title="Initial data import finished").count() == 1 def test_initial_import_with_failed_api_query(self, monkeypatch): def raise_ciscoapicallfailed(): raise CiscoApiCallFailed("Cisco API call failed message") monkeypatch.setattr(utils, "check_cisco_eox_api_access", lambda x, y, z: True) monkeypatch.setattr( cisco_eox_api_crawler, "get_raw_api_data", lambda api_query=None, year=None: raise_ciscoapicallfailed() ) # test initial import task = tasks.initial_sync_with_cisco_eox_api.delay(years_list=[2018, 2017]) expected_status_message = "The EoX data were successfully downloaded for the following years: None " \ "(for 2018,2017 the synchronization failed)" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message msg_count = NotificationMessage.objects.filter(title="Initial data import failed").count() assert msg_count == 2, "Message is created per year"
hoelsner/product-database
app/ciscoeox/tests/test_ciscoeox_tasks.py
app/ciscoeox/api_crawler.py
from django.core.cache import cache from django.db import models from django.db.models.signals import post_save, post_delete from django.dispatch import receiver class NotificationMessage(models.Model): """ Notifications from certain processes """ MESSAGE_ERROR = "ERR" MESSAGE_INFO = "INFO" MESSAGE_SUCCESS = "SUCC" MESSAGE_WARNING = "WARN" MESSAGE_TYPE = ( ("INFO", "info"), ("SUCC", "success"), ("ERR", "error"), ("WARN", "warning"), ) title = models.CharField( max_length=2048 ) type = models.CharField( max_length=8, choices=MESSAGE_TYPE, default=MESSAGE_INFO ) summary_message = models.TextField( max_length=16384 ) detailed_message = models.TextField( max_length=16384 ) created = models.DateTimeField( auto_now_add=True, editable=False ) @staticmethod def add_info_message(title, summary_message, detailed_message): NotificationMessage.objects.create( title=title, type=NotificationMessage.MESSAGE_INFO, summary_message=summary_message, detailed_message=detailed_message ) @staticmethod def add_success_message(title, summary_message, detailed_message): NotificationMessage.objects.create( title=title, type=NotificationMessage.MESSAGE_SUCCESS, summary_message=summary_message, detailed_message=detailed_message ) @staticmethod def add_warning_message(title, summary_message, detailed_message): NotificationMessage.objects.create( title=title, type=NotificationMessage.MESSAGE_WARNING, summary_message=summary_message, detailed_message=detailed_message ) @staticmethod def add_error_message(title, summary_message, detailed_message): NotificationMessage.objects.create( title=title, type=NotificationMessage.MESSAGE_ERROR, summary_message=summary_message, detailed_message=detailed_message ) def save(self, *args, **kwargs): # clean the object before save self.full_clean() super(NotificationMessage, self).save(*args, **kwargs) def __str__(self): return self.title class Meta: ordering = ('created',) class TextBlock(models.Model): """ Dynamic content at some page views """ TB_HOMEPAGE_TEXT_BEFORE_FAVORITE_ACTIONS = "Homepage text before favorite actions" TB_HOMEPAGE_TEXT_AFTER_FAVORITE_ACTIONS = "Homepage text after favorite actions" name = models.CharField( max_length=512, unique=True, verbose_name="Name", help_text="Internal name for the Text Block (predefined)", ) html_content = models.TextField( max_length=16384, verbose_name="HTML content", help_text="content of the text block (HTML possible)", null=True, blank=True ) def save(self, *args, **kwargs): # clean the object before save self.full_clean() super(TextBlock, self).save(*args, **kwargs) class ConfigOption(models.Model): GLOBAL_CISCO_API_ENABLED = "global.cisco_api_enabled" GLOBAL_LOGIN_ONLY_MODE = "global.login_only_mode" GLOBAL_INTERNAL_PRODUCT_ID_LABEL = "global.internal_product_id_label" CISCO_API_CLIENT_ID = "cisco_api.client_id" CISCO_API_CLIENT_SECRET = "cisco_api.client_secret" CISCO_EOX_CRAWLER_AUTO_SYNC = "cisco_eox.auto_sync" CISCO_EOX_CRAWLER_CREATE_PRODUCTS = "cisco_eox.create_products" CISCO_EOX_CRAWLER_LAST_EXECUTION_TIME = "cisco_eox.last_execution_time" CISCO_EOX_CRAWLER_LAST_EXECUTION_RESULT = "cisco_eox.last_execution_result" CISCO_EOX_API_QUERIES = "cisco_eox.api_queries" CISCO_EOX_PRODUCT_BLACKLIST_REGEX = "cisco_eox.product_blacklist_regex" CISCO_EOX_WAIT_TIME = "cisco_eox.wait_time_between_queries" STAT_AMOUNT_OF_PRODUCT_CHECKS = "statistics.amount_product_check_runs" STAT_AMOUNT_OF_UNIQUE_PRODUCT_CHECK_ENTRIES = "statistics.amount_unique_product_check_entries" key = models.CharField( max_length=256, unique=True ) value = 
models.CharField( max_length=8192, null=True, blank=True ) def save(self, *args, **kwargs): self.value = str(self.value).strip() if self.value else None self.key = self.key.strip() if self.key else None self.full_clean() super(ConfigOption, self).save(*args, **kwargs) def __str__(self): return self.key @receiver([post_save, post_delete], sender=NotificationMessage) def invalidate_notification_message_related_cache_values(sender, instance, **kwargs): """delete cache values that are somehow related to the Notification Message data model""" cache.delete("PDB_HOMEPAGE_CONTEXT")
""" Test suite for the ciscoeox.tasks module """ import datetime import pytest import os import requests from requests import Response from app.ciscoeox import tasks from app.ciscoeox.exception import CiscoApiCallFailed, CredentialsNotFoundException from app.config import utils from app.productdb import models as productdb_models from app.config.models import NotificationMessage from app.config.settings import AppSettings from app.productdb.models import Product, Vendor import app.ciscoeox.api_crawler as cisco_eox_api_crawler from django_project.celery import TaskState pytestmark = pytest.mark.django_db @pytest.fixture def use_test_api_configuration(): app = AppSettings() app.set_cisco_api_enabled(True) app.set_product_blacklist_regex("") app.set_cisco_eox_api_queries("") app.set_auto_create_new_products(True) app.set_periodic_sync_enabled(False) app.set_cisco_api_client_id(os.getenv("TEST_CISCO_API_CLIENT_ID", "dummy_id")) app.set_cisco_api_client_secret(os.getenv("TEST_CISCO_API_CLIENT_SECRET", "dummy_secret")) @pytest.mark.usefixtures("mock_cisco_api_authentication_server") @pytest.mark.usefixtures("use_test_api_configuration") @pytest.mark.usefixtures("set_celery_always_eager") @pytest.mark.usefixtures("redis_server_required") @pytest.mark.usefixtures("import_default_vendors") class TestExecuteTaskToSynchronizeCiscoEoxStateTask: def mock_api_call(self, monkeypatch): # mock the underlying API call class MockSession: def get(self, *args, **kwargs): r = Response() r.status_code = 200 with open("app/ciscoeox/tests/data/cisco_eox_response_page_1_of_1.json") as f: r._content = f.read().encode("utf-8") return r monkeypatch.setattr(requests, "Session", MockSession) def test_manual_task(self, monkeypatch): self.mock_api_call(monkeypatch) # try to execute it, while no auto-sync is enabled app = AppSettings() app.set_periodic_sync_enabled(False) app.set_cisco_eox_api_queries("WS-C2960-*") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' \ 'success)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" assert Product.objects.count() == 3, "Three products are part of the update" # test update required p = Product.objects.get(product_id="WS-C2950G-24-EI") p.eox_update_time_stamp = datetime.date(1999, 1, 1) p.save() task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' \ 'success)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert NotificationMessage.objects.count() == 2, "Task should create a Notification Message" assert Product.objects.count() == 3, "Three products are part of the update" def test_manual_task_with_single_blacklist_entry(self, monkeypatch): self.mock_api_call(monkeypatch) # try to execute it, while no auto-sync is enabled app = AppSettings() app.set_periodic_sync_enabled(False) 
app.set_cisco_eox_api_queries("WS-C2960-*") app.set_product_blacklist_regex("WS-C2950G-24-EI") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' \ 'success)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" assert Product.objects.count() == 2 nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_SUCCESS, "Incomplete configuration, should throw a warning " def test_manual_task_with_multiple_blacklist_entries(self, monkeypatch): self.mock_api_call(monkeypatch) # try to execute it, while no auto-sync is enabled app = AppSettings() app.set_periodic_sync_enabled(False) app.set_cisco_eox_api_queries("WS-C2960-*") app.set_product_blacklist_regex("WS-C2950G-48-EI-WS;WS-C2950G-24-EI") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay(ignore_periodic_sync_flag=True) expected_result = [ '<p style="text-align: left;">The following queries were executed:<br>' '<ul style="text-align: left;"><li><code>WS-C2960-*</code> (<b>affects 3 products</b>, ' 'success)</li></ul>' ] assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS for er in expected_result: assert er in task.info.get("status_message") assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" assert Product.objects.count() == 1, "Only a single product is imported" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_SUCCESS, "Incomplete configuration, should throw a warning " def test_periodic_task_without_queries(self, monkeypatch): self.mock_api_call(monkeypatch) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == "No Cisco EoX API queries configured." 
assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_WARNING, "Incomplete configuration, should throw a warning " \ "message" def test_api_call_error(self, monkeypatch): # force API failure class MockSession: def get(self, *args, **kwargs): raise CiscoApiCallFailed("The API is broken") monkeypatch.setattr(requests, "Session", MockSession) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;">' \ '<li class="text-danger"><code>yxcz</code> (failed, cannot contact API endpoint ' \ 'at https://api.cisco.com/supporttools/eox/rest/5/EOXByProductID/1/yxcz)</li>' \ '</ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" def test_credentials_not_found(self, monkeypatch): # force API failure class MockSession: def get(self, *args, **kwargs): raise CredentialsNotFoundException("Something is wrong with the credentials handling") monkeypatch.setattr(requests, "Session", MockSession) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = '<p style="text-align: left;">The following queries were executed:<br>' \ '<ul style="text-align: left;"><li class="text-danger"><code>yxcz</code> ' \ '(failed, cannot contact API endpoint at ' \ 'https://api.cisco.com/supporttools/eox/rest/5/EOXByProductID/1/yxcz)</li></ul></p>' assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" def test_api_check_failed(self, monkeypatch): # force API failure class MockSession: def get(self, *args, **kwargs): raise Exception("The API is broken") monkeypatch.setattr(requests, "Session", MockSession) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = "<p style=\"text-align: left;\">The following queries were executed:<br>" \ "<ul style=\"text-align: left;\"><li class=\"text-danger\"><code>yxcz</code> " \ "(failed, cannot contact API endpoint at " \ "https://api.cisco.com/supporttools/eox/rest/5/EOXByProductID/1/yxcz)</li></ul></p>" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = 
NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" def test_periodic_task_enabled_state(self, monkeypatch): self.mock_api_call(monkeypatch) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(False) task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == "task not enabled" app.set_periodic_sync_enabled(True) task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") != "task not enabled" def test_execute_task_to_synchronize_cisco_eox_states_with_failed_api_query(self, monkeypatch): def raise_ciscoapicallfailed(): raise CiscoApiCallFailed("Cisco API call failed message") monkeypatch.setattr(utils, "check_cisco_eox_api_access", lambda x, y, z: True) monkeypatch.setattr( cisco_eox_api_crawler, "get_raw_api_data", lambda api_query=None, year=None: raise_ciscoapicallfailed() ) # test automatic trigger app = AppSettings() app.set_periodic_sync_enabled(True) app.set_cisco_eox_api_queries("yxcz") task = tasks.execute_task_to_synchronize_cisco_eox_states.delay() expected_status_message = "<p style=\"text-align: left;\">The following queries were executed:<br>" \ "<ul style=\"text-align: left;\"><li class=\"text-danger\">" \ "<code>yxcz</code> (failed, Cisco API call failed message)</li></ul></p>" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message assert NotificationMessage.objects.count() == 1, "Task should create a Notification Message" nm = NotificationMessage.objects.first() assert nm.type == NotificationMessage.MESSAGE_ERROR, "Should be an error message, because all queries failed" @pytest.mark.usefixtures("set_celery_always_eager") @pytest.mark.usefixtures("redis_server_required") class TestPopulateProductLifecycleStateSyncTask: def test_no_cisco_vendor(self): result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() assert result.status == "SUCCESS" assert result.info == {"error": "Vendor \"Cisco Systems\" not found in database"} @pytest.mark.usefixtures("import_default_vendors") def test_no_cisco_products(self): result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() assert result.status == "SUCCESS" assert result.info == {"error": "No Products associated to \"Cisco Systems\" found in database"} @pytest.mark.usefixtures("import_default_vendors") def test_populate_flag_on_cisco_products(self): app_config = AppSettings() v = Vendor.objects.get(id=1) productdb_models.Product.objects.create(product_id="est", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="Test", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="TestA", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="TestB", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="TestC", vendor=v, lc_state_sync=False) productdb_models.Product.objects.create(product_id="ControlItem", vendor=v, lc_state_sync=False) # the following item is part of every query, but is never synced because of the wrong vendor 
productdb_models.Product.objects.create(product_id="Other ControlItem", lc_state_sync=False) app_config.set_cisco_eox_api_queries("") app_config.set_periodic_sync_enabled(True) result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() assert result.status == "SUCCESS" assert Product.objects.filter(lc_state_sync=True).count() == 0, "No queries configured" app_config.set_cisco_eox_api_queries("Test\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == ["Test"] app_config.set_cisco_eox_api_queries("Test*\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "Test", "TestA", "TestB", "TestC", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*estB\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "TestB", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*estB\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "TestB", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*es*\nOther ControlItem") result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) expected_result_list = [ "est", "Test", "TestA", "TestB", "TestC", ] assert result.status == "SUCCESS" assert list(filterquery.order_by("id").values_list("product_id", flat=True)) == expected_result_list app_config.set_cisco_eox_api_queries("*es*\nOther ControlItem") app_config.set_periodic_sync_enabled(False) result = tasks.cisco_eox_populate_product_lc_state_sync_field.delay() filterquery = Product.objects.filter(lc_state_sync=True) assert result.status == "SUCCESS" assert filterquery.count() == 0, "Periodic sync disabled, no value should be true" @pytest.mark.usefixtures("mock_cisco_api_authentication_server") @pytest.mark.usefixtures("use_test_api_configuration") @pytest.mark.usefixtures("set_celery_always_eager") @pytest.mark.usefixtures("redis_server_required") @pytest.mark.usefixtures("import_default_vendors") class TestInitialSyncWithCiscoEoXApiTask: def mock_api_call(self, monkeypatch): # mock the underlying API call class MockSession: def get(self, *args, **kwargs): r = Response() r.status_code = 200 with open("app/ciscoeox/tests/data/cisco_eox_response_page_1_of_1.json") as f: r._content = f.read().encode("utf-8") return r monkeypatch.setattr(requests, "Session", MockSession) def test_initial_import_with_invalid_parameters(self): with pytest.raises(AttributeError) as ex: tasks.initial_sync_with_cisco_eox_api.delay(years_list="invalid parameter") assert ex.match("years_list must be a list") with pytest.raises(AttributeError) as ex: tasks.initial_sync_with_cisco_eox_api.delay(years_list=["12", "13"]) assert ex.match("years_list 
must be a list of integers") task = tasks.initial_sync_with_cisco_eox_api.delay(years_list=[]) assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == "No years provided, nothing to do." def test_initial_import(self, monkeypatch): self.mock_api_call(monkeypatch) # start initial import task = tasks.initial_sync_with_cisco_eox_api.delay(years_list=[2018, 2017]) expected_result = "The EoX data were successfully downloaded for the following years: 2018,2017" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_result assert Product.objects.count() == 3, "Three products are part of the update (mocked)" assert NotificationMessage.objects.filter(title="Initial data import finished").count() == 1 def test_initial_import_with_failed_api_query(self, monkeypatch): def raise_ciscoapicallfailed(): raise CiscoApiCallFailed("Cisco API call failed message") monkeypatch.setattr(utils, "check_cisco_eox_api_access", lambda x, y, z: True) monkeypatch.setattr( cisco_eox_api_crawler, "get_raw_api_data", lambda api_query=None, year=None: raise_ciscoapicallfailed() ) # test initial import task = tasks.initial_sync_with_cisco_eox_api.delay(years_list=[2018, 2017]) expected_status_message = "The EoX data were successfully downloaded for the following years: None " \ "(for 2018,2017 the synchronization failed)" assert task is not None assert task.status == "SUCCESS", task.traceback assert task.state == TaskState.SUCCESS assert task.info.get("status_message") == expected_status_message msg_count = NotificationMessage.objects.filter(title="Initial data import failed").count() assert msg_count == 2, "Message is created per year"
hoelsner/product-database
app/ciscoeox/tests/test_ciscoeox_tasks.py
app/config/models.py
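The tests above all stub out the Cisco EoX API by monkeypatching `requests.Session`; a minimal self-contained sketch of that pattern follows (the inline JSON payload is hypothetical — the real tests load a fixture file instead):

import requests
from requests.models import Response


def mock_api_call(monkeypatch):
    """Replace requests.Session so any session.get() returns a canned response."""

    class MockSession:
        def get(self, *args, **kwargs):
            r = Response()
            r.status_code = 200
            r._content = b'{"EOXRecord": []}'  # hypothetical payload; the real tests read a JSON fixture
            return r

    # requests.Session() now produces MockSession instances in the code under test
    monkeypatch.setattr(requests, "Session", MockSession)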
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.core.pricing import TaxfulPrice, TaxlessPrice


class TaxedPrice(object):
    """
    Price with calculated taxes.

    .. attribute:: taxful

       (`~shuup.core.pricing.TaxfulPrice`)
       Price including taxes.

    .. attribute:: taxless

       (`~shuup.core.pricing.TaxlessPrice`)
       Pretax price.

    .. attribute:: taxes

       (`list[shuup.core.taxing.LineTax]`)
       List of taxes applied to the price.
    """

    def __init__(self, taxful, taxless, taxes=None):
        """
        Initialize from given prices and taxes.

        :type taxful: shuup.core.pricing.TaxfulPrice
        :param taxful: Price including taxes.
        :type taxless: shuup.core.pricing.TaxlessPrice
        :param taxless: Pretax price.
        :type taxes: list[shuup.core.taxing.LineTax]|None
        :param taxes: List of taxes applied to the price.
        """
        assert isinstance(taxful, TaxfulPrice)
        assert isinstance(taxless, TaxlessPrice)
        self.taxful = taxful
        self.taxless = taxless
        self.taxes = taxes or []

        # Validation
        expected_taxful_amount = taxless.amount + self.tax_amount
        assert abs(taxful.amount - expected_taxful_amount).value < 0.00001

    @property
    def tax_amount(self):
        """
        Total amount of applied taxes.
        """
        zero = self.taxful.new(0).amount
        return sum((x.amount for x in self.taxes), zero)

    @property
    def tax_rate(self):
        """
        Tax rate calculated from taxful and taxless amounts.
        """
        return (self.taxful.amount / self.taxless.amount) - 1
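A short usage sketch of TaxedPrice, not taken from the Shuup sources above; the price-class constructor signatures are assumed from typical Shuup usage, and `taxes` is left empty so the validation assert only has to reconcile the two amounts.

from shuup.core.pricing import TaxfulPrice, TaxlessPrice
from shuup.core.taxing import TaxedPrice

# With no LineTax objects attached, taxful and taxless must agree exactly.
price = TaxedPrice(TaxfulPrice(125, "EUR"), TaxlessPrice(125, "EUR"))
assert price.tax_amount.value == 0   # empty tax list sums to the zero amount
assert price.tax_rate == 0           # (taxful / taxless) - 1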
# This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from __future__ import unicode_literals from collections import defaultdict from decimal import Decimal import pytest from django.test.utils import override_settings from shuup.core.models import OrderLineType, Tax, TaxClass from shuup.core.order_creator import OrderSource from shuup.core.pricing import TaxlessPrice from shuup.core.taxing import TaxSummary from shuup.default_tax.models import TaxRule from shuup.testing.factories import ( create_product, get_payment_method, get_shipping_method, get_shop ) from shuup.utils import babel_precision_provider from shuup.utils.money import Money, set_precision_provider from shuup.utils.numbers import bankers_round def setup_module(module): # uses the get_precision to avoid db hits set_precision_provider(babel_precision_provider.get_precision) global settings_overrider settings_overrider = override_settings( SHUUP_CALCULATE_TAXES_AUTOMATICALLY_IF_POSSIBLE=False) settings_overrider.__enter__() def teardown_module(module): settings_overrider.__exit__(None, None, None) @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_1prod(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 200.00|qty: 1|discount: 0.00|tax: A', 'discount: SALE |price: -20.00|qty: 1|discount: 20.00'], tax_rates={'A': ['0.25']}) source.calculate_taxes() assert source.total_price.value == 180 assert get_price_by_tax_class(source) == {'TC-A': 200, '': -20} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-A 0.25 144.000000000 36.000000000 180.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.25 -16.000000000 -4.000000000 -20.000000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-A 0.25 180.000000000 45.000000000 225.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.25 -20.000000000 -5.000000000 -25.000000000']] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_2prods(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 10.00|qty: 1|discount: 0.00|tax: A', 'product: P2 |price: 20.00|qty: 1|discount: 0.00|tax: B', 'discount: SALE |price: -3.00|qty: 1|discount: 3.00'], tax_rates={'A': ['0.25'], 'B': ['0.20']}) source.calculate_taxes() assert source.total_price.value == 27 assert get_price_by_tax_class(source) == {'TC-A': 10, 'TC-B': 20, '': -3} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-A 0.25 7.200000000 1.800000000 9.000000000', 'Tax-B 0.20 15.000000000 3.000000000 18.000000000', 'Total 0.22 22.200000000 4.800000000 27.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.25 -0.800000000 -0.200000000 -1.000000000', 'Tax-B 0.20 -1.666666667 -0.333333333 -2.000000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-A 0.25 9.000000000 2.250000000 11.250000000', 'Tax-B 0.20 18.000000000 3.600000000 21.600000000', 'Total 0.22 27.000000000 5.850000000 32.850000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.25 -1.000000000 -0.250000000 -1.250000000', 'Tax-B 0.20 -2.000000000 -0.400000000 -2.400000000']] 
@pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_2prods_2taxrates(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 10.00|qty: 1|discount: 0.00|tax: A', 'product: P2 |price: 20.00|qty: 1|discount: 0.00|tax: B', 'discount: SALE |price: -3.00|qty: 1|discount: 3.00'], tax_rates={'A': ['0.20', '0.05'], 'B': ['0.15', '0.05']}) source.calculate_taxes() assert source.total_price.value == 27 assert get_price_by_tax_class(source) == {'TC-A': 10, 'TC-B': 20, '': -3} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-A-1 0.20 7.200000000 1.440000000 8.640000000', 'Tax-B-1 0.15 15.000000000 2.250000000 17.250000000', 'Tax-A-2 0.05 7.200000000 0.360000000 7.560000000', 'Tax-B-2 0.05 15.000000000 0.750000000 15.750000000', 'Total 0.22 22.200000000 4.800000000 27.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A-1 0.20 -0.800000000 -0.160000000 -0.960000000', 'Tax-A-2 0.05 -0.800000000 -0.040000000 -0.840000000', 'Tax-B-1 0.15 -1.666666667 -0.250000000 -1.916666667', 'Tax-B-2 0.05 -1.666666667 -0.083333333 -1.750000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-A-1 0.20 9.000000000 1.800000000 10.800000000', 'Tax-B-1 0.15 18.000000000 2.700000000 20.700000000', 'Tax-A-2 0.05 9.000000000 0.450000000 9.450000000', 'Tax-B-2 0.05 18.000000000 0.900000000 18.900000000', 'Total 0.22 27.000000000 5.850000000 32.850000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A-1 0.20 -1.000000000 -0.200000000 -1.200000000', 'Tax-A-2 0.05 -1.000000000 -0.050000000 -1.050000000', 'Tax-B-1 0.15 -2.000000000 -0.300000000 -2.300000000', 'Tax-B-2 0.05 -2.000000000 -0.100000000 -2.100000000']] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_3prods_with_services(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 10.00|qty: 2|discount: 2.00|tax: A', 'product: P2 |price: 40.00|qty: 8|discount: 0.00|tax: B', 'product: P3 |price: 60.00|qty: 6|discount: 12.00|tax: C', 'payment: Invoice |price: 2.50|qty: 1|discount: 0.00|tax: A', 'shipping: Ship |price: 7.50|qty: 1|discount: 0.00|tax: A', 'discount: SALE |price: -30.00|qty: 1|discount: 30.00'], tax_rates={'A': ['0.25'], 'B': ['0.15'], 'C': ['0.30']}) source.calculate_taxes() assert source.total_price.value == 90 assert get_price_by_tax_class(source) == { 'TC-A': 20, 'TC-B': 40, 'TC-C': 60, '': -30} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-C 0.30 34.610000000 10.384615385 45.000000000', 'Tax-A 0.25 12.000000000 3.000000000 15.000000000', 'Tax-B 0.15 26.080000000 3.913043478 30.000000000', 'Total 0.24 72.700000000 17.297658863 90.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.25 -4.000000000 -1.000000000 -5.000000000', 'Tax-B 0.15 -8.695652174 -1.304347826 -10.000000000', 'Tax-C 0.30 -11.538461538 -3.461538462 -15.000000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-C 0.30 45.000000000 13.500000000 58.500000000', 'Tax-A 0.25 15.000000000 3.750000000 18.750000000', 'Tax-B 0.15 30.000000000 4.500000000 34.500000000', 'Total 0.24 90.000000000 21.750000000 111.750000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.25 -5.000000000 -1.250000000 -6.250000000', 'Tax-B 0.15 -10.000000000 -1.500000000 -11.500000000', 'Tax-C 0.30 
-15.000000000 -4.500000000 -19.500000000']] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_3prods_services_2discounts(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 24.00|qty: 2|discount: 0.00|tax: A', 'product: P2 |price: 100.00|qty: 8|discount: 0.00|tax: B', 'product: P3 |price: 80.00|qty: 5|discount: 5.00|tax: C', 'payment: Invoice |price: 6.25|qty: 1|discount: 0.00|tax: B', 'shipping: Ship |price: 18.75|qty: 1|discount: 2.50|tax: B', 'discount: SALE1 |price: -22.90|qty: 1|discount: 22.90', 'discount: SALE2 |price: -11.45|qty: 1|discount: 11.45'], tax_rates={'A': ['0.25'], 'B': ['0.20'], 'C': ['0.50']}) source.calculate_taxes() _check_taxful_price(source) _check_taxless_price(source) assert source.total_price.value == Decimal('194.65') assert get_price_by_tax_class(source) == { 'TC-A': 24, 'TC-B': 125, 'TC-C': 80, '': Decimal('-34.35')} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-C 0.50 45.330000000 22.666666667 68.000000000', 'Tax-A 0.25 16.320000000 4.080000000 20.400000000', 'Tax-B 0.20 88.530000000 17.708333333 106.250000000', 'Total 0.30 150.180000000 44.455000000 194.650000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [ ['Tax-A 0.25 -1.920000000 -0.480000000 -2.400000000', 'Tax-B 0.20 -10.416666667 -2.083333333 -12.500000000', 'Tax-C 0.50 -5.333333333 -2.666666667 -8.000000000'], ['Tax-A 0.25 -0.960000000 -0.240000000 -1.200000000', 'Tax-B 0.20 -5.208333333 -1.041666667 -6.250000000', 'Tax-C 0.50 -2.666666667 -1.333333333 -4.000000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-C 0.50 68.000000000 34.000000000 102.000000000', 'Tax-A 0.25 20.400000000 5.100000000 25.500000000', 'Tax-B 0.20 106.250000000 21.250000000 127.500000000', 'Total 0.31 194.650000000 60.350000000 255.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [ ['Tax-A 0.25 -2.400000000 -0.600000000 -3.000000000', 'Tax-B 0.20 -12.500000000 -2.500000000 -15.000000000', 'Tax-C 0.50 -8.000000000 -4.000000000 -12.000000000'], ['Tax-A 0.25 -1.200000000 -0.300000000 -1.500000000', 'Tax-B 0.20 -6.250000000 -1.250000000 -7.500000000', 'Tax-C 0.50 -4.000000000 -2.000000000 -6.000000000']] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_3prods_services_2discounts2(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 30.00|qty: 2|discount: 0.00|tax: A', 'product: P2 |price: 120.00|qty: 8|discount: 0.00|tax: B', 'product: P3 |price: 120.00|qty: 5|discount: 5.00|tax: C', 'payment: Invoice |price: 7.50|qty: 1|discount: 0.00|tax: B', 'shipping: Ship |price: 22.50|qty: 1|discount: 2.50|tax: B', 'discount: SALE1 |price: -30.00|qty: 1|discount: 30.00', 'discount: SALE2 |price: -15.00|qty: 1|discount: 15.00'], tax_rates={'A': ['0.25'], 'B': ['0.20'], 'C': ['0.50']}) source.calculate_taxes() _check_taxful_price(source) _check_taxless_price(source) assert source.total_price.value == 255 assert get_price_by_tax_class(source) == { 'TC-A': 30, 'TC-B': 150, 'TC-C': 120, '': -45} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-C 0.50 68.000000000 34.000000000 102.000000000', 'Tax-A 0.25 20.400000000 5.100000000 25.500000000', 'Tax-B 0.20 106.250000000 21.250000000 127.500000000', 'Total 0.31 194.650000000 60.350000000 255.000000000'] assert 
get_pretty_line_taxes_of_discount_lines(source) == [ ['Tax-A 0.25 -2.400000000 -0.600000000 -3.000000000', 'Tax-B 0.20 -12.500000000 -2.500000000 -15.000000000', 'Tax-C 0.50 -8.000000000 -4.000000000 -12.000000000'], ['Tax-A 0.25 -1.200000000 -0.300000000 -1.500000000', 'Tax-B 0.20 -6.250000000 -1.250000000 -7.500000000', 'Tax-C 0.50 -4.000000000 -2.000000000 -6.000000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-C 0.50 102.000000000 51.000000000 153.000000000', 'Tax-A 0.25 25.500000000 6.375000000 31.880000000', 'Tax-B 0.20 127.500000000 25.500000000 153.000000000', 'Total 0.32 255.000000000 82.875000000 337.870000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [ ['Tax-A 0.25 -3.000000000 -0.750000000 -3.750000000', 'Tax-B 0.20 -15.000000000 -3.000000000 -18.000000000', 'Tax-C 0.50 -12.000000000 -6.000000000 -18.000000000'], ['Tax-A 0.25 -1.500000000 -0.375000000 -1.875000000', 'Tax-B 0.20 -7.500000000 -1.500000000 -9.000000000', 'Tax-C 0.50 -6.000000000 -3.000000000 -9.000000000']] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_all_discounted(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 30.00|qty: 1|discount: 0.00|tax: A', 'product: P2 |price: 60.00|qty: 1|discount: 0.00|tax: B', 'discount: SALE |price: -90.00|qty: 1|discount: 90.00'], tax_rates={'A': ['0.20'], 'B': ['0.10']}) source.calculate_taxes() _check_taxful_price(source) _check_taxless_price(source) assert source.total_price.value == 0 assert get_price_by_tax_class(source) == {'TC-A': 30, 'TC-B': 60, '': -90} if taxes == 'taxful': # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-A 0.20 0.000000000 0.000000000 0.000000000', 'Tax-B 0.10 0.000000000 0.000000000 0.000000000', 'Total 0.00 0.000000000 0.000000000 0.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.20 -25.000000000 -5.000000000 -30.000000000', 'Tax-B 0.10 -54.545454545 -5.454545455 -60.000000000']] else: assert get_pretty_tax_summary(source) == [ 'Tax-A 0.20 0.000000000 0.000000000 0.000000000', 'Tax-B 0.10 0.000000000 0.000000000 0.000000000', 'Total 0.00 0.000000000 0.000000000 0.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[ 'Tax-A 0.20 -30.000000000 -6.000000000 -36.000000000', 'Tax-B 0.10 -60.000000000 -6.000000000 -66.000000000']] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_zero_priced_products(taxes): source = create_order_source( prices_include_tax=(taxes == 'taxful'), line_data=[ 'product: P1 |price: 0.00|qty: 1|discount: 0.00|tax: A', 'product: P2 |price: 0.00|qty: 1|discount: 0.00|tax: B', 'discount: Foobar |price: 10.00|qty: 1|discount:-10.00'], tax_rates={'A': ['0.25'], 'B': ['0.20']}) source.calculate_taxes() _check_taxful_price(source) _check_taxless_price(source) assert source.total_price.value == 10 assert get_price_by_tax_class(source) == {'TC-A': 0, 'TC-B': 0, '': 10} # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Tax-A 0.25 0.000000000 0.000000000 0.000000000', 'Tax-B 0.20 0.000000000 0.000000000 0.000000000', 'Untaxed 0.00 10.000000000 0.000000000 10.000000000', 'Total 0.00 10.000000000 0.000000000 10.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[]] @pytest.mark.django_db @pytest.mark.parametrize("taxes", ['taxful', 'taxless']) def test_no_products(taxes): source = create_order_source( 
prices_include_tax=(taxes == 'taxful'), line_data=[ 'discount: Foobar |price: 50.00|qty: 1|discount:-50.00', 'discount: SALE |price: -10.00|qty: 1|discount: 10.00'], tax_rates={}) source.calculate_taxes() _check_taxful_price(source) _check_taxless_price(source) assert source.total_price.value == 40 assert get_price_by_tax_class(source) == {'': 40} # Name Rate Base amount Tax amount Taxful assert get_pretty_tax_summary(source) == [ 'Untaxed 0.00 40.000000000 0.000000000 40.000000000'] assert get_pretty_line_taxes_of_discount_lines(source) == [[], []] # ================================================================ # Creating test data # ================================================================ class Line(object): def __init__(self, price, quantity=1, discount=0, **kwargs): self.price = Decimal(price) self.quantity = Decimal(quantity) self.discount = Decimal(discount) self.__dict__.update(kwargs) self.base_unit_price = (self.price + self.discount) / self.quantity self.is_product = ('product_sku' in kwargs) self.is_payment = ('payment_name' in kwargs) self.is_shipping = ('shipping_name' in kwargs) self.is_discount = ('discount_text' in kwargs) @classmethod def from_text(cls, text): preparsed = (item.split(':') for item in text.split('|')) data = {x[0].strip(): x[1].strip() for x in preparsed} mappings = [ ('product', 'product_sku'), ('payment', 'payment_name'), ('shipping', 'shipping_name'), ('discount', 'discount_text'), ('qty', 'quantity'), ('disc', 'discount'), ('tax', 'tax_name'), ] for (old_key, new_key) in mappings: if old_key in data: data[new_key] = data[old_key] del data[old_key] return cls(**data) def create_order_source(prices_include_tax, line_data, tax_rates): """ Get order source with some testing data. :rtype: OrderSource """ lines = [Line.from_text(x) for x in line_data] shop = get_shop(prices_include_tax, currency='USD') tax_classes = create_assigned_tax_classes(tax_rates) products = create_products(shop, lines, tax_classes) services = create_services(shop, lines, tax_classes) source = OrderSource(shop) fill_order_source(source, lines, products, services) return source def create_assigned_tax_classes(tax_rates): return { tax_name: create_assigned_tax_class(tax_name, rates_to_assign) for (tax_name, rates_to_assign) in tax_rates.items() } def create_assigned_tax_class(name, rates_to_assign): """ Create a tax class and assign taxes for it with tax rules. 
""" tax_class = TaxClass.objects.create(name='TC-%s' % name) for (n, tax_rate) in enumerate(rates_to_assign, 1): tax_name = ( 'Tax-%s' % name if len(rates_to_assign) == 1 else 'Tax-%s-%d' % (name, n)) tax = Tax.objects.create(rate=tax_rate, name=tax_name) TaxRule.objects.create(tax=tax).tax_classes.add(tax_class) return tax_class def create_products(shop, lines, tax_classes): return { line.product_sku: create_product( line.product_sku, shop, default_price=line.base_unit_price, tax_class=tax_classes[line.tax_name] ) for line in lines if line.is_product } def create_services(shop, lines, tax_classes): def service_name(line): return 'payment_method' if line.is_payment else 'shipping_method' return { service_name(line): create_service(shop, line, tax_classes) for line in lines if (line.is_payment or line.is_shipping) } def create_service(shop, line, tax_classes): assert line.quantity == 1 and line.discount == 0 if line.is_payment: meth = get_payment_method( shop=shop, price=line.price, name=line.payment_name) elif line.is_shipping: meth = get_shipping_method( shop=shop, price=line.price, name=line.shipping_name) meth.tax_class = tax_classes[line.tax_name] meth.save() return meth def fill_order_source(source, lines, products, services): for line in lines: if line.is_product: source.add_line( product=products[line.product_sku], quantity=line.quantity, base_unit_price=source.create_price(line.base_unit_price), discount_amount=source.create_price(line.discount), ) elif line.is_payment: source.payment_method = services['payment_method'] elif line.is_shipping: source.shipping_method = services['shipping_method'] elif line.is_discount: source.add_line( type=OrderLineType.DISCOUNT, quantity=line.quantity, base_unit_price=source.create_price(line.base_unit_price), discount_amount=source.create_price(line.discount), text=line.discount_text, ) # ================================================================ # Getting and formatting results # ================================================================ def get_price_by_tax_class(source): price_by_tax_class = defaultdict(Decimal) for line in source.get_final_lines(with_taxes=False): tax_class_name = (line.tax_class.name if line.tax_class else '') price_by_tax_class[tax_class_name] += line.price.value return price_by_tax_class TAX_DISTRIBUTION_LINE_FORMAT = ( '{n:7s} {r:4.2f} {ba:14.9f} {a:14.9f} {t:14.9f}') def get_pretty_tax_summary(source): summary = get_tax_summary(source) lines = [ TAX_DISTRIBUTION_LINE_FORMAT.format( n=line.tax_name, r=line.tax_rate, ba=line.based_on, a=line.tax_amount, t=line.taxful) for line in summary] total_base = source.taxless_total_price.value total_taxful = source.taxful_total_price.value total_tax_amount = sum(x.tax_amount.value for x in summary) # Like it was read from db total_line = TAX_DISTRIBUTION_LINE_FORMAT.format( n="Total", r=((total_tax_amount / total_base) if abs(total_base) > 0.0001 else 0), ba=total_base, a=total_tax_amount, t=total_taxful) return lines + ([total_line] if len(summary) > 1 else []) def get_tax_summary(source): """ Get tax summary of given source lines. 
    :type source: OrderSource
    :rtype: TaxSummary
    """
    all_line_taxes = []
    untaxed = TaxlessPrice(source.create_price(0).amount)
    for line in source.get_final_lines():
        line_taxes = list(line.taxes)
        all_line_taxes.extend(line_taxes)
        if not line_taxes:
            untaxed += line.taxless_price
    return TaxSummary.from_line_taxes(all_line_taxes, untaxed)


def get_pretty_line_taxes_of_discount_lines(source):
    return [
        prettify_line_taxes(line)
        for line in source.get_final_lines()
        if line.tax_class is None]


def prettify_line_taxes(line):
    return [
        TAX_DISTRIBUTION_LINE_FORMAT.format(
            n=line_tax.name, r=line_tax.rate, ba=line_tax.base_amount,
            a=line_tax.amount, t=(line_tax.base_amount + line_tax.amount))
        for line_tax in sorted(line.taxes, key=(lambda x: x.name))]


def _check_taxless_price(source):
    for line in source.get_lines():
        from_components = bankers_round(
            line.taxless_base_unit_price * line.quantity
            - line.taxless_discount_amount, 2)
        assert from_components == line.taxless_price
        assert line.price == line.base_unit_price * line.quantity - line.discount_amount
        assert line.discount_amount == line.base_price - line.price
        if line.base_price:
            assert line.discount_rate == (1 - (line.price / line.base_price))
            assert line.discount_percentage == 100 * (1 - (line.price / line.base_price))
        if line.quantity:
            assert line.unit_discount_amount == line.discount_amount / line.quantity


def _check_taxful_price(source):
    for line in source.get_lines():
        from_components = bankers_round(
            line.taxful_base_unit_price * line.quantity
            - line.taxful_discount_amount, 2)
        assert from_components == line.taxful_price
        assert line.price == line.base_unit_price * line.quantity - line.discount_amount
        assert line.discount_amount == line.base_price - line.price
        if line.base_price:
            assert line.discount_rate == (1 - (line.price / line.base_price))
            assert line.discount_percentage == 100 * (1 - (line.price / line.base_price))
        if line.quantity:
            assert line.unit_discount_amount == line.discount_amount / line.quantity
suutari-ai/shoop
shuup_tests/functional/test_discount_taxing.py
shuup/core/taxing/_price.py
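For readers of the test module above: the pipe-separated strings in `line_data` are parsed by the `Line.from_text` helper defined near the end of that file. A small illustration of how one entry decomposes (assumes it runs inside the same module, where `Line` and `Decimal` are in scope):

line = Line.from_text('product: P1 |price: 200.00|qty: 1|discount: 0.00|tax: A')
assert line.is_product
assert line.product_sku == 'P1'           # 'product' is mapped to 'product_sku'
assert line.tax_name == 'A'               # 'tax' is mapped to 'tax_name'
assert line.base_unit_price == Decimal('200.00')  # (price + discount) / quantity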
""" Quick Dial Usage: quickdial quickdial -h | --help quickdial -v | --version -h --help show this -v --version show version """ from nekrobox.docdecs import params from . import gateaddr from . import pathing from . import __version__ @params(origin=(int, "Point of origin symbol"), length=(int, "Length of address excluding origin"), count=(int, "Number of addresses to generate"), symbols=(str, "Symbols for pretty printing"), returns=(tuple, "The shortest distance and corresponding address")) def calculate(origin=None, length=6, count=1000, symbols="ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"): """Generate a mass of addresses and find the fastest address to dial.""" symbolcount = len(symbols) shortest = (float('Inf'), []) for address in gateaddr.generate(origin, count, length, symbolcount): distance = pathing.address_distance(address, symbolcount) if distance < shortest[0]: shortest = (distance, address) return shortest def main(argv=None): vstr = "Quick Dial Gate Address Calculator v{0}" arguments = docopt(__doc__, version=vstr.format(__version__), argv=argv) if __name__ == "__main__": main()
""" Tests for `quickdial` module. """ import pytest from quickdial import quickdial class TestQuickdial(object): @classmethod def setup_class(cls): pass def test_something(self): pass @classmethod def teardown_class(cls): pass
Nekroze/quickdial
test/test_quickdial.py
quickdial/quickdial.py
# -*- mode: python; coding: utf-8 -*- # Copyright (c) 2018 Radio Astronomy Software Group # Licensed under the 2-clause BSD License """Class for reading FHD save files.""" import numpy as np import warnings from scipy.io.idl import readsav from astropy import constants as const from .uvdata import UVData from .. import utils as uvutils from .. import telescopes as uvtel __all__ = ["get_fhd_history", "FHD"] def get_fhd_history(settings_file, return_user=False): """ Small function to get the important history from an FHD settings text file. Includes information about the command line call, the user, machine name and date Parameters ---------- settings_file : str FHD settings file name return_user : bool optionally return the username who ran FHD Returns ------- history : str string of history extracted from the settings file user : str Only returned if return_user is True """ with open(settings_file, "r") as f: settings_lines = f.readlines() main_loc = None command_loc = None obs_loc = None user_line = None for ind, line in enumerate(settings_lines): if line.startswith("##MAIN"): main_loc = ind if line.startswith("##COMMAND_LINE"): command_loc = ind if line.startswith("##OBS"): obs_loc = ind if line.startswith("User"): user_line = ind if ( main_loc is not None and command_loc is not None and obs_loc is not None and user_line is not None ): break main_lines = settings_lines[main_loc + 1 : command_loc] command_lines = settings_lines[command_loc + 1 : obs_loc] history_lines = ["FHD history\n"] + main_lines + command_lines for ind, line in enumerate(history_lines): history_lines[ind] = line.rstrip().replace("\t", " ") history = "\n".join(history_lines) user = settings_lines[user_line].split()[1] if return_user: return history, user else: return history class FHD(UVData): """ Defines a FHD-specific subclass of UVData for reading FHD save files. This class should not be interacted with directly, instead use the read_fhd method on the UVData class. """ def _latlonalt_close(self, latlonalt1, latlonalt2): radian_tols = self._phase_center_ra.tols loc_tols = self._telescope_location.tols latlon_close = np.allclose( np.array(latlonalt1[0:2]), np.array(latlonalt2[0:2]), rtol=radian_tols[0], atol=radian_tols[1], ) alt_close = np.isclose( latlonalt1[2], latlonalt2[2], rtol=loc_tols[0], atol=loc_tols[1] ) if latlon_close and alt_close: return True else: return False def _xyz_close(self, xyz1, xyz2): loc_tols = self._telescope_location.tols return np.allclose(xyz1, xyz2, rtol=loc_tols[0], atol=loc_tols[1]) def read_fhd( self, filelist, use_model=False, background_lsts=True, read_data=True, run_check=True, check_extra=True, run_check_acceptability=True, strict_uvw_antpos_check=False, ): """ Read in data from a list of FHD files. Parameters ---------- filelist : array_like of str The list/array of FHD save files to read from. Must include at least one polarization file, a params file and a flag file. An obs file is also required if `read_data` is False. use_model : bool Option to read in the model visibilities rather than the dirty visibilities (the default is False, meaning the dirty visibilities will be read). background_lsts : bool When set to True, the lst_array is calculated in a background thread. read_data : bool Read in the visibility, nsample and flag data. If set to False, only the metadata will be read in. Setting read_data to False results in a metadata only object. If read_data is False, an obs file must be included in the filelist. 
Note that if read_data is False, Npols is derived from the obs file and reflects the number of polarizations used in the FHD run. If read_data is True, Npols is given by the number of visibility data files provided in `filelist`. run_check : bool Option to check for the existence and proper shapes of parameters after after reading in the file (the default is True, meaning the check will be run). check_extra : bool Option to check optional parameters as well as required ones (the default is True, meaning the optional parameters will be checked). run_check_acceptability : bool Option to check acceptable range of the values of parameters after reading in the file (the default is True, meaning the acceptable range check will be done). strict_uvw_antpos_check : bool Option to raise an error rather than a warning if the check that uvws match antenna positions does not pass. Raises ------ IOError If root file directory doesn't exist. ValueError If required files are missing or multiple files for any polarization are included in filelist. If there is no recognized key for visibility weights in the flags_file. """ datafiles = {} params_file = None obs_file = None flags_file = None layout_file = None settings_file = None if use_model: data_name = "_vis_model_" else: data_name = "_vis_" for file in filelist: if file.lower().endswith(data_name + "xx.sav"): if "xx" in list(datafiles.keys()): raise ValueError("multiple xx datafiles in filelist") datafiles["xx"] = file elif file.lower().endswith(data_name + "yy.sav"): if "yy" in list(datafiles.keys()): raise ValueError("multiple yy datafiles in filelist") datafiles["yy"] = file elif file.lower().endswith(data_name + "xy.sav"): if "xy" in list(datafiles.keys()): raise ValueError("multiple xy datafiles in filelist") datafiles["xy"] = file elif file.lower().endswith(data_name + "yx.sav"): if "yx" in list(datafiles.keys()): raise ValueError("multiple yx datafiles in filelist") datafiles["yx"] = file elif file.lower().endswith("_params.sav"): if params_file is not None: raise ValueError("multiple params files in filelist") params_file = file elif file.lower().endswith("_obs.sav"): if obs_file is not None: raise ValueError("multiple obs files in filelist") obs_file = file elif file.lower().endswith("_flags.sav"): if flags_file is not None: raise ValueError("multiple flags files in filelist") flags_file = file elif file.lower().endswith("_layout.sav"): if layout_file is not None: raise ValueError("multiple layout files in filelist") layout_file = file elif file.lower().endswith("_settings.txt"): if settings_file is not None: raise ValueError("multiple settings files in filelist") settings_file = file else: # this is reached in tests but marked as uncovered because # CPython's peephole optimizer replaces a jump to a continue # with a jump to the top of the loop continue # pragma: no cover if len(datafiles) < 1 and read_data is True: raise ValueError( "No data files included in file list and read_data is True." ) if obs_file is None and read_data is False: raise ValueError( "No obs file included in file list and read_data is False." ) if params_file is None: raise ValueError("No params file included in file list") if flags_file is None: raise ValueError("No flags file included in file list") if layout_file is None: warnings.warn( "No layout file included in file list, " "antenna_postions will not be defined." 
) if settings_file is None: warnings.warn("No settings file included in file list") if not read_data: obs_dict = readsav(obs_file, python_dict=True) this_obs = obs_dict["obs"] self.Npols = int(this_obs[0]["N_POL"]) else: # TODO: add checking to make sure params, flags and datafiles are # consistent with each other vis_data = {} for pol, file in datafiles.items(): this_dict = readsav(file, python_dict=True) if use_model: vis_data[pol] = this_dict["vis_model_ptr"] else: vis_data[pol] = this_dict["vis_ptr"] this_obs = this_dict["obs"] self.Npols = len(list(vis_data.keys())) obs = this_obs bl_info = obs["BASELINE_INFO"][0] astrometry = obs["ASTR"][0] fhd_pol_list = [] for pol in obs["POL_NAMES"][0]: fhd_pol_list.append(pol.decode("utf8").lower()) params_dict = readsav(params_file, python_dict=True) params = params_dict["params"] if read_data: flag_file_dict = readsav(flags_file, python_dict=True) # The name for this variable changed recently (July 2016). Test for both. vis_weights_data = {} if "flag_arr" in flag_file_dict: weights_key = "flag_arr" elif "vis_weights" in flag_file_dict: weights_key = "vis_weights" else: raise ValueError( "No recognized key for visibility weights in flags_file." ) for index, w in enumerate(flag_file_dict[weights_key]): vis_weights_data[fhd_pol_list[index]] = w self.Ntimes = int(obs["N_TIME"][0]) self.Nbls = int(obs["NBASELINES"][0]) self.Nblts = params["UU"][0].size self.Nfreqs = int(obs["N_FREQ"][0]) self.Nspws = 1 self.spw_array = np.array([0]) self.vis_units = "JY" # bl_info.JDATE (a vector of length Ntimes) is the only safe date/time # to use in FHD files. # (obs.JD0 (float) and params.TIME (vector of length Nblts) are # context dependent and are not safe # because they depend on the phasing of the visibilities) # the values in bl_info.JDATE are the JD for each integration. # We need to expand up to Nblts. int_times = list(uvutils._get_iterable(bl_info["JDATE"][0])) bin_offset = bl_info["BIN_OFFSET"][0] if self.Ntimes != len(int_times): warnings.warn( "Ntimes does not match the number of unique times in the data" ) self.time_array = np.zeros(self.Nblts) if self.Ntimes == 1: self.time_array.fill(int_times[0]) else: for ii in range(0, len(int_times)): if ii < (len(int_times) - 1): self.time_array[bin_offset[ii] : bin_offset[ii + 1]] = int_times[ii] else: self.time_array[bin_offset[ii] :] = int_times[ii] # this is generated in FHD by subtracting the JD of neighboring # integrations. This can have limited accuracy, so it can be slightly # off the actual value. # (e.g. 1.999426... rather than 2) time_res = obs["TIME_RES"] # time_res is constrained to be a scalar currently self.integration_time = ( np.ones_like(self.time_array, dtype=np.float64) * time_res[0] ) # # --- observation information --- self.telescope_name = obs["INSTRUMENT"][0].decode("utf8") # This is a bit of a kludge because nothing like object_name exists # in FHD files. 
# At least for the MWA, obs.ORIG_PHASERA and obs.ORIG_PHASEDEC specify # the field the telescope was nominally pointing at # (May need to be revisited, but probably isn't too important) self.object_name = ( "Field RA(deg): " + str(obs["ORIG_PHASERA"][0]) + ", Dec:" + str(obs["ORIG_PHASEDEC"][0]) ) # For the MWA, this can sometimes be converted to EoR fields if self.telescope_name.lower() == "mwa": if np.isclose(obs["ORIG_PHASERA"][0], 0) and np.isclose( obs["ORIG_PHASEDEC"][0], -27 ): self.object_name = "EoR 0 Field" self.instrument = self.telescope_name latitude = np.deg2rad(float(obs["LAT"][0])) longitude = np.deg2rad(float(obs["LON"][0])) altitude = float(obs["ALT"][0]) # get the stuff FHD read from the antenna table (in layout file) if layout_file is not None: layout_dict = readsav(layout_file, python_dict=True) layout = layout_dict["layout"] layout_fields = [name.lower() for name in layout.dtype.names] # Try to get the telescope location from the layout file & # compare it to the position from the obs structure. arr_center = layout["array_center"][0] layout_fields.remove("array_center") xyz_telescope_frame = layout["coordinate_frame"][0].decode("utf8").lower() layout_fields.remove("coordinate_frame") if xyz_telescope_frame == "itrf": # compare to lat/lon/alt location_latlonalt = uvutils.XYZ_from_LatLonAlt( latitude, longitude, altitude ) latlonalt_arr_center = uvutils.LatLonAlt_from_XYZ( arr_center, check_acceptability=run_check_acceptability ) # check both lat/lon/alt and xyz because of subtle differences # in tolerances if self._xyz_close( location_latlonalt, arr_center ) or self._latlonalt_close( (latitude, longitude, altitude), latlonalt_arr_center ): self.telescope_location = arr_center else: # values do not agree with each other to within the tolerances. # this is a known issue with FHD runs on cotter uvfits # files for the MWA # compare with the known_telescopes values telescope_obj = uvtel.get_telescope(self.telescope_name) # start warning message message = ( "Telescope location derived from obs lat/lon/alt " "values does not match the location in the layout file." ) if telescope_obj is not False: if self._latlonalt_close( (latitude, longitude, altitude), telescope_obj.telescope_location_lat_lon_alt, ): # obs lat/lon/alt matches known_telescopes message += ( " Value from obs lat/lon/alt matches the " "known_telescopes values, using them." ) self.telescope_location = location_latlonalt elif self._xyz_close( arr_center, telescope_obj.telescope_location ): # layout xyz matches known_telescopes message += ( " Value from the layout file matches the " "known_telescopes values, using them." ) self.telescope_location = arr_center else: # None of the values match each other. Defaulting # to known_telescopes value. message += ( " Neither location matches the values " "in known_telescopes. Defaulting to " "using the known_telescopes values." ) self.telescope_location = telescope_obj.telescope_location else: message += ( " Telescope is not in known_telescopes. " "Defaulting to using the obs derived values." 
) self.telescope_location = location_latlonalt # issue warning warnings.warn(message) else: self.telescope_location_lat_lon_alt = (latitude, longitude, altitude) self.antenna_positions = layout["antenna_coords"][0] layout_fields.remove("antenna_coords") self.antenna_names = [ ant.decode("utf8").strip() for ant in layout["antenna_names"][0].tolist() ] layout_fields.remove("antenna_names") # make these 0-indexed (rather than one indexed) self.antenna_numbers = layout["antenna_numbers"][0] - 1 layout_fields.remove("antenna_numbers") self.Nants_telescope = int(layout["n_antenna"][0]) layout_fields.remove("n_antenna") if self.telescope_name.lower() == "mwa": # check that obs.baseline_info.tile_names match the antenna names # this only applies for MWA because the tile_names come from # metafits files obs_tile_names = [ ant.decode("utf8").strip() for ant in bl_info["TILE_NAMES"][0].tolist() ] obs_tile_names = [ "Tile" + "0" * (3 - len(ant)) + ant for ant in obs_tile_names ] # tile_names are assumed to be ordered: so their index gives # the antenna number # make an comparison array from self.antenna_names ordered this way. ant_names = np.zeros((np.max(self.antenna_numbers) + 1), str).tolist() for index, number in enumerate(self.antenna_numbers): ant_names[number] = self.antenna_names[index] if obs_tile_names != ant_names: warnings.warn( "tile_names from obs structure does not match " "antenna_names from layout" ) self.gst0 = float(layout["gst0"][0]) layout_fields.remove("gst0") if layout["ref_date"][0] != "": self.rdate = layout["ref_date"][0].decode("utf8").lower() layout_fields.remove("ref_date") self.earth_omega = float(layout["earth_degpd"][0]) layout_fields.remove("earth_degpd") self.dut1 = float(layout["dut1"][0]) layout_fields.remove("dut1") self.timesys = layout["time_system"][0].decode("utf8").upper().strip() layout_fields.remove("time_system") if "diameters" in layout_fields: self.timesys = layout["time_system"][0].decode("utf8").upper().strip() layout_fields.remove("diameters") # ignore some fields, put everything else in extra_keywords layout_fields_ignore = [ "diff_utc", "pol_type", "n_pol_cal_params", "mount_type", "axis_offset", "pola", "pola_orientation", "pola_cal_params", "polb", "polb_orientation", "polb_cal_params", "beam_fwhm", ] for field in layout_fields_ignore: if field in layout_fields: layout_fields.remove(field) for field in layout_fields: keyword = field if len(keyword) > 8: keyword = field.replace("_", "") value = layout[field][0] if isinstance(value, bytes): value = value.decode("utf8") self.extra_keywords[keyword.upper()] = value else: self.telescope_location_lat_lon_alt = (latitude, longitude, altitude) self.antenna_names = [ ant.decode("utf8").strip() for ant in bl_info["TILE_NAMES"][0].tolist() ] if self.telescope_name.lower() == "mwa": self.antenna_names = [ "Tile" + "0" * (3 - len(ant)) + ant for ant in self.antenna_names ] self.Nants_telescope = len(self.antenna_names) self.antenna_numbers = np.arange(self.Nants_telescope) try: self.set_telescope_params() except ValueError as ve: warnings.warn(str(ve)) # need to make sure telescope location is defined properly before this call proc = self.set_lsts_from_time_array(background=background_lsts) if not np.isclose(obs["OBSRA"][0], obs["PHASERA"][0]) or not np.isclose( obs["OBSDEC"][0], obs["PHASEDEC"][0] ): warnings.warn( "These visibilities may have been phased " "improperly -- without changing the uvw locations" ) self._set_phased() self.phase_center_ra_degrees = np.float(obs["OBSRA"][0]) 
self.phase_center_dec_degrees = np.float(obs["OBSDEC"][0]) self.phase_center_epoch = astrometry["EQUINOX"][0] # Note that FHD antenna arrays are 1-indexed so we subtract 1 # to get 0-indexed arrays self.ant_1_array = bl_info["TILE_A"][0] - 1 self.ant_2_array = bl_info["TILE_B"][0] - 1 self.Nants_data = int(np.union1d(self.ant_1_array, self.ant_2_array).size) self.baseline_array = self.antnums_to_baseline( self.ant_1_array, self.ant_2_array ) if self.Nbls != len(np.unique(self.baseline_array)): warnings.warn( "Nbls does not match the number of unique baselines in the data" ) # TODO: Spw axis to be collapsed in future release self.freq_array = np.zeros((1, len(bl_info["FREQ"][0])), dtype=np.float_) self.freq_array[0, :] = bl_info["FREQ"][0] self.channel_width = float(obs["FREQ_RES"][0]) # In FHD, uvws are in seconds not meters. # FHD follows the FITS uvw direction convention, which is opposite # ours and Miriad's. # So conjugate the visibilities and flip the uvws: self.uvw_array = np.zeros((self.Nblts, 3)) self.uvw_array[:, 0] = (-1) * params["UU"][0] * const.c.to("m/s").value self.uvw_array[:, 1] = (-1) * params["VV"][0] * const.c.to("m/s").value self.uvw_array[:, 2] = (-1) * params["WW"][0] * const.c.to("m/s").value lin_pol_order = ["xx", "yy", "xy", "yx"] linear_pol_dict = dict(zip(lin_pol_order, np.arange(5, 9) * -1)) pol_list = [] if read_data: for pol in lin_pol_order: if pol in vis_data: pol_list.append(linear_pol_dict[pol]) self.polarization_array = np.asarray(pol_list) else: # Use Npols because for FHD, npol fully specifies which pols to use pol_strings = lin_pol_order[: self.Npols] self.polarization_array = np.asarray( [linear_pol_dict[pol] for pol in pol_strings] ) # history: add the first few lines from the settings file if settings_file is not None: self.history = get_fhd_history(settings_file) else: self.history = "" if not uvutils._check_history_version(self.history, self.pyuvdata_version_str): self.history += self.pyuvdata_version_str if read_data: # TODO: Spw axis to be collapsed in future release self.data_array = np.zeros( (self.Nblts, 1, self.Nfreqs, self.Npols), dtype=np.complex_ ) # TODO: Spw axis to be collapsed in future release self.nsample_array = np.zeros( (self.Nblts, 1, self.Nfreqs, self.Npols), dtype=np.float_ ) # TODO: Spw axis to be collapsed in future release self.flag_array = np.zeros( (self.Nblts, 1, self.Nfreqs, self.Npols), dtype=np.bool_ ) for pol, vis in vis_data.items(): pol_i = pol_list.index(linear_pol_dict[pol]) # FHD follows the FITS uvw direction convention, which is opposite # ours and Miriad's. # So conjugate the visibilities and flip the uvws: self.data_array[:, 0, :, pol_i] = np.conj(vis) self.flag_array[:, 0, :, pol_i] = vis_weights_data[pol] <= 0 self.nsample_array[:, 0, :, pol_i] = np.abs(vis_weights_data[pol]) # wait for LSTs if set in background if proc is not None: proc.join() # check if object has all required uv_properties set if run_check: self.check( check_extra=check_extra, run_check_acceptability=run_check_acceptability, strict_uvw_antpos_check=strict_uvw_antpos_check, )
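A tiny numeric illustration, with made-up values, of the uvw handling in read_fhd above: FHD stores uvws in seconds using the FITS sign convention, so the reader scales by the speed of light and negates to land in pyuvdata's convention.

import numpy as np
from astropy import constants as const

uu_seconds = np.array([1.0e-7, -2.5e-7])  # params["UU"]-style values, in seconds
u_meters = (-1) * uu_seconds * const.c.to("m/s").value
print(u_meters)  # ~[-29.98, 74.95]: sign flipped, scaled to meters by c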
# -*- mode: python; coding: utf-8 -*-
# Copyright (c) 2018 Radio Astronomy Software Group
# Licensed under the 2-clause BSD License

"""Tests for FHD object."""
import pytest
import os
import glob
import numpy as np
from shutil import copyfile

from pyuvdata import UVData
import pyuvdata.utils as uvutils
import pyuvdata.tests as uvtest
from pyuvdata.data import DATA_PATH

# set up FHD file list
testdir = os.path.join(DATA_PATH, "fhd_vis_data/")
testfile_prefix = "1061316296_"
# note: 1061316296_obs.sav isn't used -- it's there to test handling of
# unneeded files
testfile_suffix = [
    "flags.sav",
    "vis_XX.sav",
    "params.sav",
    "vis_YY.sav",
    "vis_model_XX.sav",
    "vis_model_YY.sav",
    "layout.sav",
    "settings.txt",
    "obs.sav",
]
testfiles = []
for s in testfile_suffix:
    testfiles.append(testdir + testfile_prefix + s)


@pytest.fixture(scope="session")
def fhd_data():
    """Read the test FHD files once per session."""
    fhd_uv = UVData()
    fhd_uv.read(testfiles)

    return fhd_uv


@pytest.fixture(scope="session")
def fhd_model():
    """Read the test FHD model visibilities once per session."""
    fhd_uv = UVData()
    fhd_uv.read(testfiles, use_model=True)

    return fhd_uv


@pytest.mark.filterwarnings("ignore:Telescope location derived from obs")
@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits(fhd_data, tmp_path):
    """
    FHD to uvfits loopback test.

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = fhd_data
    uvfits_uv = UVData()

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:Telescope location derived from obs")
@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_metadata_only(fhd_data):
    fhd_uv = UVData()
    fhd_uv.read_fhd(testfiles, read_data=False)

    assert fhd_uv.metadata_only

    fhd_uv2 = fhd_data
    fhd_uv3 = fhd_uv2.copy(metadata_only=True)

    assert fhd_uv == fhd_uv3


def test_read_fhd_metadata_only_error():
    fhd_uv = UVData()
    with pytest.raises(
        ValueError, match="No obs file included in file list and read_data is False."
    ):
        fhd_uv.read_fhd(testfiles[:7], read_data=False)


@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_select():
    """
    Test select on read with FHD files.

    Read in FHD files with generic read & select on read, compare to read fhd
    files then do select.
    """
    fhd_uv = UVData()
    fhd_uv2 = UVData()
    with uvtest.check_warnings(
        UserWarning,
        [
            'Warning: select on read keyword set, but file_type is "fhd" which '
            "does not support select on read. Entire file will be read and then "
            "select will be performed",
            "Telescope location derived from obs lat/lon/alt values does not "
            "match the location in the layout file. Value from obs lat/lon/alt "
            "matches the known_telescopes values, using them.",
            "The uvw_array does not match the expected values given the antenna "
            "positions.",
            "The uvw_array does not match the expected values given the antenna "
            "positions.",
        ],
    ):
        fhd_uv2.read(testfiles, freq_chans=np.arange(2))

    with uvtest.check_warnings(
        UserWarning,
        [
            "Telescope location derived from obs lat/lon/alt values does not "
            "match the location in the layout file. Value from obs lat/lon/alt "
            "matches the known_telescopes values, using them.",
            "The uvw_array does not match the expected values given the antenna "
            "positions.",
        ],
    ):
        fhd_uv.read(testfiles)

    fhd_uv.select(freq_chans=np.arange(2))
    assert fhd_uv == fhd_uv2


def test_read_fhd_write_read_uvfits_no_layout():
    """
    Test errors/warnings with no layout file.
    """
    fhd_uv = UVData()
    files_use = testfiles[:-3] + [testfiles[-2]]

    # check warning raised
    with uvtest.check_warnings(UserWarning, "No layout file"):
        fhd_uv.read(files_use, run_check=False)

    with pytest.raises(
        ValueError, match="Required UVParameter _antenna_positions has not been set"
    ):
        with uvtest.check_warnings(UserWarning, "No layout file"):
            fhd_uv.read(files_use)


@pytest.mark.filterwarnings("ignore:Telescope location derived from obs")
@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits_variant_flag(tmp_path):
    """
    FHD to uvfits loopback test with variant flag file.

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = UVData()
    uvfits_uv = UVData()
    variant_flag_file = testdir + testfile_prefix + "variant_flags.sav"
    files_use = testfiles[1:] + [variant_flag_file]
    fhd_uv.read(files_use)

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits_fix_layout(tmp_path):
    """
    FHD to uvfits loopback test with fixed array center layout file.

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = UVData()
    uvfits_uv = UVData()
    layout_fixed_file = testdir + testfile_prefix + "fixed_arr_center_layout.sav"
    files_use = testfiles[0:6] + [layout_fixed_file, testfiles[7]]
    fhd_uv.read(files_use)

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits_fix_layout_bad_obs_loc(tmp_path):
    """
    FHD to uvfits loopback test with fixed array center layout file, bad obs location.

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = UVData()
    uvfits_uv = UVData()
    bad_obs_loc_file = testdir + testfile_prefix + "bad_obs_loc_vis_XX.sav"
    layout_fixed_file = testdir + testfile_prefix + "fixed_arr_center_layout.sav"
    files_use = [
        testfiles[0],
        testfiles[2],
        bad_obs_loc_file,
        layout_fixed_file,
        testfiles[7],
    ]
    messages = [
        "Telescope location derived from obs",
        "tile_names from obs structure does not match",
        "The uvw_array does not match the expected values given the antenna "
        "positions.",
    ]
    with uvtest.check_warnings(UserWarning, messages):
        fhd_uv.read(files_use)

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits_bad_obs_loc(tmp_path):
    """
    FHD to uvfits loopback test with bad obs location (and bad layout location).

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = UVData()
    uvfits_uv = UVData()
    bad_obs_loc_file = testdir + testfile_prefix + "bad_obs_loc_vis_XX.sav"
    files_use = [
        testfiles[0],
        testfiles[2],
        bad_obs_loc_file,
        testfiles[6],
        testfiles[7],
    ]
    messages = [
        "Telescope location derived from obs",
        "tile_names from obs structure does not match",
        "The uvw_array does not match the expected values given the antenna "
        "positions.",
    ]
    with uvtest.check_warnings(UserWarning, messages):
        fhd_uv.read(files_use)

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits_altered_layout(tmp_path):
    """
    FHD to uvfits loopback test with altered layout file.

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = UVData()
    uvfits_uv = UVData()

    # bad layout structure values
    altered_layout_file = testdir + testfile_prefix + "broken_layout.sav"
    files_use = testfiles[0:6] + [altered_layout_file, testfiles[7]]
    fhd_uv.read(files_use)

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_write_read_uvfits_no_settings(tmp_path):
    """
    FHD to uvfits loopback test with no settings file.

    Read in FHD files, write out as uvfits, read back in and check for
    object equality.
    """
    fhd_uv = UVData()
    uvfits_uv = UVData()
    messages = [
        "No settings file included in file list",
        "Telescope location derived from obs lat/lon/alt values does "
        "not match the location in the layout file. Value from obs "
        "lat/lon/alt matches the known_telescopes values, using them.",
        "The uvw_array does not match the expected values given the antenna positions.",
    ]
    with uvtest.check_warnings(UserWarning, messages):
        fhd_uv.read(testfiles[:-2])

    # Check only pyuvdata history with no settings file
    assert fhd_uv.history == fhd_uv.pyuvdata_version_str

    outfile = str(tmp_path / "outtest_FHD_1061316296.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


def test_break_read_fhd():
    """Try various cases of incomplete file lists."""
    fhd_uv = UVData()
    # missing flags
    with pytest.raises(ValueError, match="No flags file included in file list"):
        fhd_uv.read(testfiles[1:])

    # missing params
    subfiles = [item for sublist in [testfiles[0:2], testfiles[3:]] for item in sublist]
    with pytest.raises(ValueError, match="No params file included in file list"):
        fhd_uv.read(subfiles)

    # no data files
    with pytest.raises(
        ValueError,
        match="No data files included in file list and read_data is True.",
    ):
        fhd_uv.read(["foo.sav"])


def test_read_fhd_warnings():
    """Test warnings with various broken inputs."""
    # bad obs structure values
    broken_data_file = testdir + testfile_prefix + "broken_vis_XX.sav"
    bad_filelist = [
        testfiles[0],
        testfiles[2],
        broken_data_file,
        testfiles[6],
        testfiles[7],
    ]
    warn_messages = [
        "Ntimes does not match",
        "Telescope location derived from obs",
        "Telescope foo is not in known_telescopes.",
        "These visibilities may have been phased improperly",
        "Nbls does not match",
    ]
    fhd_uv = UVData()
    with uvtest.check_warnings(UserWarning, warn_messages):
        fhd_uv.read(bad_filelist, run_check=False)

    # bad flag file
    broken_flag_file = testdir + testfile_prefix + "broken_flags.sav"
    bad_filelist = testfiles[1:] + [broken_flag_file]
    fhd_uv = UVData()
    with pytest.raises(
        ValueError, match="No recognized key for visibility weights in flags_file."
    ):
        fhd_uv.read(bad_filelist)


@pytest.mark.parametrize(
    "new_file_end,file_copy,message",
    [
        (["extra_vis_XX.sav"], testfiles[1], "multiple xx datafiles in filelist"),
        (["extra_vis_YY.sav"], testfiles[3], "multiple yy datafiles in filelist"),
        (
            ["vis_XY.sav", "extra_vis_XY.sav"],
            testfiles[1],
            "multiple xy datafiles in filelist",
        ),
        (
            ["vis_YX.sav", "extra_vis_YX.sav"],
            testfiles[1],
            "multiple yx datafiles in filelist",
        ),
        (["extra_params.sav"], testfiles[2], "multiple params files in filelist"),
        (["extra_obs.sav"], testfiles[8], "multiple obs files in filelist"),
        (["extra_flags.sav"], testfiles[0], "multiple flags files in filelist"),
        (["extra_layout.sav"], testfiles[6], "multiple layout files in filelist"),
        (["extra_settings.txt"], testfiles[7], "multiple settings files in filelist"),
    ],
)
def test_read_fhd_extra_files(new_file_end, file_copy, message):
    # try cases with extra files of each type
    new_files = []
    for file_end in new_file_end:
        extra_file = testdir + testfile_prefix + file_end
        new_files.append(extra_file)
        copyfile(file_copy, extra_file)
    fhd_uv = UVData()
    with pytest.raises(ValueError, match=message):
        fhd_uv.read(testfiles + new_files)

    for extra_file in new_files:
        os.remove(extra_file)


@pytest.mark.filterwarnings("ignore:Telescope location derived from obs")
@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_read_fhd_model(tmp_path, fhd_model):
    """FHD to uvfits loopback test with model visibilities."""
    fhd_uv = fhd_model
    uvfits_uv = UVData()

    outfile = str(tmp_path / "outtest_FHD_1061316296_model.uvfits")
    fhd_uv.write_uvfits(
        outfile,
        spoof_nonessential=True,
    )
    uvfits_uv.read_uvfits(outfile)
    assert fhd_uv == uvfits_uv


@pytest.mark.filterwarnings("ignore:Telescope location derived from obs")
@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_multi_files(fhd_model):
    """Read multiple files at once."""
    fhd_uv1 = UVData()
    fhd_uv2 = UVData()
    test1 = list(np.array(testfiles)[[0, 1, 2, 4, 6, 7]])
    test2 = list(np.array(testfiles)[[0, 2, 3, 5, 6, 7]])
    fhd_uv1.read(np.array([test1, test2]), use_model=True, file_type="fhd")

    fhd_uv2 = fhd_model

    assert uvutils._check_histories(
        fhd_uv2.history + " Combined data along polarization axis using pyuvdata.",
        fhd_uv1.history,
    )

    fhd_uv1.history = fhd_uv2.history
    assert fhd_uv1 == fhd_uv2


@pytest.mark.filterwarnings("ignore:Telescope location derived from obs")
@pytest.mark.filterwarnings("ignore:The uvw_array does not match the expected values")
def test_multi_files_axis(fhd_model):
    """Read multiple files at once with axis keyword."""
    fhd_uv1 = UVData()
    fhd_uv2 = UVData()
    test1 = list(np.array(testfiles)[[0, 1, 2, 4, 6, 7]])
    test2 = list(np.array(testfiles)[[0, 2, 3, 5, 6, 7]])
    fhd_uv1.read(np.array([test1, test2]), use_model=True, axis="polarization")

    fhd_uv2 = fhd_model

    assert uvutils._check_histories(
        fhd_uv2.history + " Combined data along polarization axis using pyuvdata.",
        fhd_uv1.history,
    )

    fhd_uv1.history = fhd_uv2.history
    assert fhd_uv1 == fhd_uv2


def test_single_time():
    """
    Test reading in a file with a single time.
    """
    single_time_filelist = glob.glob(os.path.join(DATA_PATH, "refsim1.1_fhd/*"))

    fhd_uv = UVData()
    with uvtest.check_warnings(
        UserWarning,
        [
            "Telescope gaussian is not in known_telescopes.",
            "The uvw_array does not match the expected values given the antenna "
            "positions.",
        ],
    ):
        fhd_uv.read(single_time_filelist)

    assert np.unique(fhd_uv.time_array).size == 1


def test_conjugation():
    """
    Test uvfits vs fhd conjugation.
    """
    uvfits_file = os.path.join(DATA_PATH, "ref_1.1_uniform.uvfits")
    fhd_filelist = glob.glob(os.path.join(DATA_PATH, "refsim1.1_fhd/*"))

    uvfits_uv = UVData()
    uvfits_uv.read(uvfits_file)

    fhd_uv = UVData()
    with uvtest.check_warnings(
        UserWarning,
        [
            "Telescope gaussian is not in known_telescopes.",
            "The uvw_array does not match the expected values given the antenna "
            "positions.",
        ],
    ):
        fhd_uv.read(fhd_filelist)

    uvfits_uv.select(polarizations=fhd_uv.polarization_array)

    assert uvfits_uv._uvw_array == fhd_uv._uvw_array
    assert uvfits_uv._data_array == fhd_uv._data_array
HERA-Team/pyuvdata
pyuvdata/uvdata/tests/test_fhd.py
pyuvdata/uvdata/fhd.py
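Every loopback test above follows the same round-trip recipe. A minimal sketch of that recipe, assuming a hypothetical directory of FHD run outputs (the path and glob pattern below are invented; `spoof_nonessential=True` is the keyword the tests above pass to write_uvfits so that parameters required by uvfits get filled in):

import glob

from pyuvdata import UVData

# Hypothetical FHD run outputs (flags/params/layout/settings/vis files).
my_fhd_files = glob.glob("/path/to/fhd_run/1061316296_*")

fhd_uv = UVData()
fhd_uv.read(my_fhd_files)  # file type is inferred from the file list

fhd_uv.write_uvfits("loopback.uvfits", spoof_nonessential=True)

uvfits_uv = UVData()
uvfits_uv.read_uvfits("loopback.uvfits")

# Object equality is the core assertion in every loopback test above.
assert fhd_uv == uvfits_uv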
""" masked_reductions.py is for reduction algorithms using a mask-based approach for missing values. """ from typing import Callable import numpy as np from pandas._libs import missing as libmissing from pandas.compat.numpy import np_version_under1p17 from pandas.core.nanops import check_below_min_count def _sumprod( func: Callable, values: np.ndarray, mask: np.ndarray, skipna: bool = True, min_count: int = 0, ): """ Sum or product for 1D masked array. Parameters ---------- func : np.sum or np.prod values : np.ndarray Numpy array with the values (can be of any dtype that support the operation). mask : np.ndarray Boolean numpy array (True values indicate missing values). skipna : bool, default True Whether to skip NA. min_count : int, default 0 The required number of valid values to perform the operation. If fewer than ``min_count`` non-NA values are present the result will be NA. """ if not skipna: if mask.any() or check_below_min_count(values.shape, None, min_count): return libmissing.NA else: return func(values) else: if check_below_min_count(values.shape, mask, min_count): return libmissing.NA if np_version_under1p17: return func(values[~mask]) else: return func(values, where=~mask) def sum(values: np.ndarray, mask: np.ndarray, skipna: bool = True, min_count: int = 0): return _sumprod( np.sum, values=values, mask=mask, skipna=skipna, min_count=min_count ) def prod(values: np.ndarray, mask: np.ndarray, skipna: bool = True, min_count: int = 0): return _sumprod( np.prod, values=values, mask=mask, skipna=skipna, min_count=min_count ) def _minmax(func: Callable, values: np.ndarray, mask: np.ndarray, skipna: bool = True): """ Reduction for 1D masked array. Parameters ---------- func : np.min or np.max values : np.ndarray Numpy array with the values (can be of any dtype that support the operation). mask : np.ndarray Boolean numpy array (True values indicate missing values). skipna : bool, default True Whether to skip NA. """ if not skipna: if mask.any() or not values.size: # min/max with empty array raise in numpy, pandas returns NA return libmissing.NA else: return func(values) else: subset = values[~mask] if subset.size: return func(subset) else: # min/max with empty array raise in numpy, pandas returns NA return libmissing.NA def min(values: np.ndarray, mask: np.ndarray, skipna: bool = True): return _minmax(np.min, values=values, mask=mask, skipna=skipna) def max(values: np.ndarray, mask: np.ndarray, skipna: bool = True): return _minmax(np.max, values=values, mask=mask, skipna=skipna)
import numpy as np
import pytest

import pandas as pd
from pandas import DataFrame, Index, date_range
import pandas._testing as tm


@pytest.mark.parametrize("func", ["ffill", "bfill"])
def test_groupby_column_index_name_lost_fill_funcs(func):
    # GH: 29764 groupby loses index sometimes
    df = pd.DataFrame(
        [[1, 1.0, -1.0], [1, np.nan, np.nan], [1, 2.0, -2.0]],
        columns=pd.Index(["type", "a", "b"], name="idx"),
    )
    df_grouped = df.groupby(["type"])[["a", "b"]]
    result = getattr(df_grouped, func)().columns
    expected = pd.Index(["a", "b"], name="idx")
    tm.assert_index_equal(result, expected)


@pytest.mark.parametrize("func", ["ffill", "bfill"])
def test_groupby_fill_duplicate_column_names(func):
    # GH: 25610 ValueError with duplicate column names
    df1 = pd.DataFrame({"field1": [1, 3, 4], "field2": [1, 3, 4]})
    df2 = pd.DataFrame({"field1": [1, np.nan, 4]})
    df_grouped = pd.concat([df1, df2], axis=1).groupby(by=["field2"])
    expected = pd.DataFrame(
        [[1, 1.0], [3, np.nan], [4, 4.0]], columns=["field1", "field1"]
    )
    result = getattr(df_grouped, func)()
    tm.assert_frame_equal(result, expected)


def test_ffill_missing_arguments():
    # GH 14955
    df = pd.DataFrame({"a": [1, 2], "b": [1, 1]})
    with pytest.raises(ValueError, match="Must specify a fill"):
        df.groupby("b").fillna()


def test_fill_consistency():
    # GH9221
    # pass thru keyword arguments to the generated wrapper
    # are set if the passed kw is None (only)
    df = DataFrame(
        index=pd.MultiIndex.from_product(
            [["value1", "value2"], date_range("2014-01-01", "2014-01-06")]
        ),
        columns=Index(["1", "2"], name="id"),
    )
    df["1"] = [
        np.nan,
        1,
        np.nan,
        np.nan,
        11,
        np.nan,
        np.nan,
        2,
        np.nan,
        np.nan,
        22,
        np.nan,
    ]
    df["2"] = [
        np.nan,
        3,
        np.nan,
        np.nan,
        33,
        np.nan,
        np.nan,
        4,
        np.nan,
        np.nan,
        44,
        np.nan,
    ]

    expected = df.groupby(level=0, axis=0).fillna(method="ffill")
    result = df.T.groupby(level=0, axis=1).fillna(method="ffill").T
    tm.assert_frame_equal(result, expected)
rs2/pandas
pandas/tests/groupby/test_missing.py
pandas/core/array_algos/masked_reductions.py
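A short usage sketch of the reductions above; the import path follows the code_path listed for this row, and the values are made up for illustration:

import numpy as np

from pandas._libs import missing as libmissing
from pandas.core.array_algos import masked_reductions

values = np.array([1.0, 2.0, 4.0])
mask = np.array([False, True, False])  # True marks a missing value

# NA positions are excluded when skipna=True (the default).
assert masked_reductions.sum(values, mask) == 5.0

# With only two valid values present, min_count=3 forces an NA result.
assert masked_reductions.sum(values, mask, min_count=3) is libmissing.NA

# skipna=False propagates any masked value straight to NA.
assert masked_reductions.sum(values, mask, skipna=False) is libmissing.NA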
import numpy as np
import pytest

import pandas as pd
from pandas.core.internals import ObjectBlock

from .base import BaseExtensionTests


class BaseCastingTests(BaseExtensionTests):
    """Casting to and from ExtensionDtypes"""

    def test_astype_object_series(self, all_data):
        ser = pd.Series(all_data, name="A")
        result = ser.astype(object)
        assert isinstance(result._mgr.blocks[0], ObjectBlock)

    def test_astype_object_frame(self, all_data):
        df = pd.DataFrame({"A": all_data})
        result = df.astype(object)
        blk = result._data.blocks[0]
        assert isinstance(blk, ObjectBlock), type(blk)

        # FIXME: these currently fail; don't leave commented-out
        # check that we can compare the dtypes
        # cmp = result.dtypes.equals(df.dtypes)
        # assert not cmp.any()

    def test_tolist(self, data):
        result = pd.Series(data).tolist()
        expected = list(data)
        assert result == expected

    def test_astype_str(self, data):
        result = pd.Series(data[:5]).astype(str)
        expected = pd.Series([str(x) for x in data[:5]], dtype=str)
        self.assert_series_equal(result, expected)

    def test_astype_string(self, data):
        # GH-33465
        result = pd.Series(data[:5]).astype("string")
        expected = pd.Series([str(x) for x in data[:5]], dtype="string")
        self.assert_series_equal(result, expected)

    def test_to_numpy(self, data):
        expected = np.asarray(data)

        result = data.to_numpy()
        self.assert_equal(result, expected)

        result = pd.Series(data).to_numpy()
        self.assert_equal(result, expected)

    def test_astype_empty_dataframe(self, dtype):
        # https://github.com/pandas-dev/pandas/issues/33113
        df = pd.DataFrame()
        result = df.astype(dtype)
        self.assert_frame_equal(result, df)

    @pytest.mark.parametrize("copy", [True, False])
    def test_astype_own_type(self, data, copy):
        # ensure that astype returns the original object for equal dtype
        # and copy=False
        # https://github.com/pandas-dev/pandas/issues/28488
        result = data.astype(data.dtype, copy=copy)
        assert (result is data) is (not copy)
        self.assert_extension_array_equal(result, data)
rs2/pandas
pandas/tests/groupby/test_missing.py
pandas/tests/extension/base/casting.py
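The identity semantics checked by test_astype_own_type can be seen directly with any built-in extension dtype. A small sketch using the nullable Int64 array (the values are invented; tm.assert_extension_array_equal mirrors the helper the base class wraps):

import pandas as pd
import pandas._testing as tm

arr = pd.array([1, 2, None], dtype="Int64")

# Same dtype with copy=False hands back the original object (GH 28488).
assert arr.astype(arr.dtype, copy=False) is arr

# copy=True returns an equal but distinct array.
copied = arr.astype(arr.dtype, copy=True)
assert copied is not arr
tm.assert_extension_array_equal(copied, arr)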
""" Pyperclip A cross-platform clipboard module for Python, with copy & paste functions for plain text. By Al Sweigart al@inventwithpython.com BSD License Usage: import pyperclip pyperclip.copy('The text to be copied to the clipboard.') spam = pyperclip.paste() if not pyperclip.is_available(): print("Copy functionality unavailable!") On Windows, no additional modules are needed. On Mac, the pyobjc module is used, falling back to the pbcopy and pbpaste cli commands. (These commands should come with OS X.). On Linux, install xclip or xsel via package manager. For example, in Debian: sudo apt-get install xclip sudo apt-get install xsel Otherwise on Linux, you will need the PyQt5 modules installed. This module does not work with PyGObject yet. Cygwin is currently not supported. Security Note: This module runs programs with these names: - which - where - pbcopy - pbpaste - xclip - xsel - klipper - qdbus A malicious user could rename or add programs with these names, tricking Pyperclip into running them with whatever permissions the Python process has. """ __version__ = "1.7.0" import contextlib import ctypes from ctypes import c_size_t, c_wchar, c_wchar_p, get_errno, sizeof import os import platform import subprocess import time import warnings # `import PyQt4` sys.exit()s if DISPLAY is not in the environment. # Thus, we need to detect the presence of $DISPLAY manually # and not load PyQt4 if it is absent. HAS_DISPLAY = os.getenv("DISPLAY", False) EXCEPT_MSG = """ Pyperclip could not find a copy/paste mechanism for your system. For more information, please visit https://pyperclip.readthedocs.io/en/latest/introduction.html#not-implemented-error """ ENCODING = "utf-8" # The "which" unix command finds where a command is. if platform.system() == "Windows": WHICH_CMD = "where" else: WHICH_CMD = "which" def _executable_exists(name): return ( subprocess.call( [WHICH_CMD, name], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) == 0 ) # Exceptions class PyperclipException(RuntimeError): pass class PyperclipWindowsException(PyperclipException): def __init__(self, message): message += f" ({ctypes.WinError()})" super().__init__(message) def _stringifyText(text) -> str: acceptedTypes = (str, int, float, bool) if not isinstance(text, acceptedTypes): raise PyperclipException( f"only str, int, float, and bool values " f"can be copied to the clipboard, not {type(text).__name__}" ) return str(text) def init_osx_pbcopy_clipboard(): def copy_osx_pbcopy(text): text = _stringifyText(text) # Converts non-str values to str. p = subprocess.Popen(["pbcopy", "w"], stdin=subprocess.PIPE, close_fds=True) p.communicate(input=text.encode(ENCODING)) def paste_osx_pbcopy(): p = subprocess.Popen(["pbpaste", "r"], stdout=subprocess.PIPE, close_fds=True) stdout, stderr = p.communicate() return stdout.decode(ENCODING) return copy_osx_pbcopy, paste_osx_pbcopy def init_osx_pyobjc_clipboard(): def copy_osx_pyobjc(text): """Copy string argument to clipboard""" text = _stringifyText(text) # Converts non-str values to str. 
newStr = Foundation.NSString.stringWithString_(text).nsstring() newData = newStr.dataUsingEncoding_(Foundation.NSUTF8StringEncoding) board = AppKit.NSPasteboard.generalPasteboard() board.declareTypes_owner_([AppKit.NSStringPboardType], None) board.setData_forType_(newData, AppKit.NSStringPboardType) def paste_osx_pyobjc(): """Returns contents of clipboard""" board = AppKit.NSPasteboard.generalPasteboard() content = board.stringForType_(AppKit.NSStringPboardType) return content return copy_osx_pyobjc, paste_osx_pyobjc def init_qt_clipboard(): global QApplication # $DISPLAY should exist # Try to import from qtpy, but if that fails try PyQt5 then PyQt4 try: from qtpy.QtWidgets import QApplication except ImportError: try: from PyQt5.QtWidgets import QApplication except ImportError: from PyQt4.QtGui import QApplication app = QApplication.instance() if app is None: app = QApplication([]) def copy_qt(text): text = _stringifyText(text) # Converts non-str values to str. cb = app.clipboard() cb.setText(text) def paste_qt() -> str: cb = app.clipboard() return str(cb.text()) return copy_qt, paste_qt def init_xclip_clipboard(): DEFAULT_SELECTION = "c" PRIMARY_SELECTION = "p" def copy_xclip(text, primary=False): text = _stringifyText(text) # Converts non-str values to str. selection = DEFAULT_SELECTION if primary: selection = PRIMARY_SELECTION p = subprocess.Popen( ["xclip", "-selection", selection], stdin=subprocess.PIPE, close_fds=True ) p.communicate(input=text.encode(ENCODING)) def paste_xclip(primary=False): selection = DEFAULT_SELECTION if primary: selection = PRIMARY_SELECTION p = subprocess.Popen( ["xclip", "-selection", selection, "-o"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, ) stdout, stderr = p.communicate() # Intentionally ignore extraneous output on stderr when clipboard is empty return stdout.decode(ENCODING) return copy_xclip, paste_xclip def init_xsel_clipboard(): DEFAULT_SELECTION = "-b" PRIMARY_SELECTION = "-p" def copy_xsel(text, primary=False): text = _stringifyText(text) # Converts non-str values to str. selection_flag = DEFAULT_SELECTION if primary: selection_flag = PRIMARY_SELECTION p = subprocess.Popen( ["xsel", selection_flag, "-i"], stdin=subprocess.PIPE, close_fds=True ) p.communicate(input=text.encode(ENCODING)) def paste_xsel(primary=False): selection_flag = DEFAULT_SELECTION if primary: selection_flag = PRIMARY_SELECTION p = subprocess.Popen( ["xsel", selection_flag, "-o"], stdout=subprocess.PIPE, close_fds=True ) stdout, stderr = p.communicate() return stdout.decode(ENCODING) return copy_xsel, paste_xsel def init_klipper_clipboard(): def copy_klipper(text): text = _stringifyText(text) # Converts non-str values to str. 
p = subprocess.Popen( [ "qdbus", "org.kde.klipper", "/klipper", "setClipboardContents", text.encode(ENCODING), ], stdin=subprocess.PIPE, close_fds=True, ) p.communicate(input=None) def paste_klipper(): p = subprocess.Popen( ["qdbus", "org.kde.klipper", "/klipper", "getClipboardContents"], stdout=subprocess.PIPE, close_fds=True, ) stdout, stderr = p.communicate() # Workaround for https://bugs.kde.org/show_bug.cgi?id=342874 # TODO: https://github.com/asweigart/pyperclip/issues/43 clipboardContents = stdout.decode(ENCODING) # even if blank, Klipper will append a newline at the end assert len(clipboardContents) > 0 # make sure that newline is there assert clipboardContents.endswith("\n") if clipboardContents.endswith("\n"): clipboardContents = clipboardContents[:-1] return clipboardContents return copy_klipper, paste_klipper def init_dev_clipboard_clipboard(): def copy_dev_clipboard(text): text = _stringifyText(text) # Converts non-str values to str. if text == "": warnings.warn( "Pyperclip cannot copy a blank string to the clipboard on Cygwin." "This is effectively a no-op." ) if "\r" in text: warnings.warn("Pyperclip cannot handle \\r characters on Cygwin.") with open("/dev/clipboard", "wt") as fo: fo.write(text) def paste_dev_clipboard() -> str: with open("/dev/clipboard") as fo: content = fo.read() return content return copy_dev_clipboard, paste_dev_clipboard def init_no_clipboard(): class ClipboardUnavailable: def __call__(self, *args, **kwargs): raise PyperclipException(EXCEPT_MSG) def __bool__(self) -> bool: return False return ClipboardUnavailable(), ClipboardUnavailable() # Windows-related clipboard functions: class CheckedCall: def __init__(self, f): super().__setattr__("f", f) def __call__(self, *args): ret = self.f(*args) if not ret and get_errno(): raise PyperclipWindowsException("Error calling " + self.f.__name__) return ret def __setattr__(self, key, value): setattr(self.f, key, value) def init_windows_clipboard(): global HGLOBAL, LPVOID, DWORD, LPCSTR, INT global HWND, HINSTANCE, HMENU, BOOL, UINT, HANDLE from ctypes.wintypes import ( BOOL, DWORD, HANDLE, HGLOBAL, HINSTANCE, HMENU, HWND, INT, LPCSTR, LPVOID, UINT, ) windll = ctypes.windll msvcrt = ctypes.CDLL("msvcrt") safeCreateWindowExA = CheckedCall(windll.user32.CreateWindowExA) safeCreateWindowExA.argtypes = [ DWORD, LPCSTR, LPCSTR, DWORD, INT, INT, INT, INT, HWND, HMENU, HINSTANCE, LPVOID, ] safeCreateWindowExA.restype = HWND safeDestroyWindow = CheckedCall(windll.user32.DestroyWindow) safeDestroyWindow.argtypes = [HWND] safeDestroyWindow.restype = BOOL OpenClipboard = windll.user32.OpenClipboard OpenClipboard.argtypes = [HWND] OpenClipboard.restype = BOOL safeCloseClipboard = CheckedCall(windll.user32.CloseClipboard) safeCloseClipboard.argtypes = [] safeCloseClipboard.restype = BOOL safeEmptyClipboard = CheckedCall(windll.user32.EmptyClipboard) safeEmptyClipboard.argtypes = [] safeEmptyClipboard.restype = BOOL safeGetClipboardData = CheckedCall(windll.user32.GetClipboardData) safeGetClipboardData.argtypes = [UINT] safeGetClipboardData.restype = HANDLE safeSetClipboardData = CheckedCall(windll.user32.SetClipboardData) safeSetClipboardData.argtypes = [UINT, HANDLE] safeSetClipboardData.restype = HANDLE safeGlobalAlloc = CheckedCall(windll.kernel32.GlobalAlloc) safeGlobalAlloc.argtypes = [UINT, c_size_t] safeGlobalAlloc.restype = HGLOBAL safeGlobalLock = CheckedCall(windll.kernel32.GlobalLock) safeGlobalLock.argtypes = [HGLOBAL] safeGlobalLock.restype = LPVOID safeGlobalUnlock = CheckedCall(windll.kernel32.GlobalUnlock) 
safeGlobalUnlock.argtypes = [HGLOBAL] safeGlobalUnlock.restype = BOOL wcslen = CheckedCall(msvcrt.wcslen) wcslen.argtypes = [c_wchar_p] wcslen.restype = UINT GMEM_MOVEABLE = 0x0002 CF_UNICODETEXT = 13 @contextlib.contextmanager def window(): """ Context that provides a valid Windows hwnd. """ # we really just need the hwnd, so setting "STATIC" # as predefined lpClass is just fine. hwnd = safeCreateWindowExA( 0, b"STATIC", None, 0, 0, 0, 0, 0, None, None, None, None ) try: yield hwnd finally: safeDestroyWindow(hwnd) @contextlib.contextmanager def clipboard(hwnd): """ Context manager that opens the clipboard and prevents other applications from modifying the clipboard content. """ # We may not get the clipboard handle immediately because # some other application is accessing it (?) # We try for at least 500ms to get the clipboard. t = time.time() + 0.5 success = False while time.time() < t: success = OpenClipboard(hwnd) if success: break time.sleep(0.01) if not success: raise PyperclipWindowsException("Error calling OpenClipboard") try: yield finally: safeCloseClipboard() def copy_windows(text): # This function is heavily based on # http://msdn.com/ms649016#_win32_Copying_Information_to_the_Clipboard text = _stringifyText(text) # Converts non-str values to str. with window() as hwnd: # http://msdn.com/ms649048 # If an application calls OpenClipboard with hwnd set to NULL, # EmptyClipboard sets the clipboard owner to NULL; # this causes SetClipboardData to fail. # => We need a valid hwnd to copy something. with clipboard(hwnd): safeEmptyClipboard() if text: # http://msdn.com/ms649051 # If the hMem parameter identifies a memory object, # the object must have been allocated using the # function with the GMEM_MOVEABLE flag. count = wcslen(text) + 1 handle = safeGlobalAlloc(GMEM_MOVEABLE, count * sizeof(c_wchar)) locked_handle = safeGlobalLock(handle) ctypes.memmove( c_wchar_p(locked_handle), c_wchar_p(text), count * sizeof(c_wchar), ) safeGlobalUnlock(handle) safeSetClipboardData(CF_UNICODETEXT, handle) def paste_windows(): with clipboard(None): handle = safeGetClipboardData(CF_UNICODETEXT) if not handle: # GetClipboardData may return NULL with errno == NO_ERROR # if the clipboard is empty. # (Also, it may return a handle to an empty buffer, # but technically that's not empty) return "" return c_wchar_p(handle).value return copy_windows, paste_windows def init_wsl_clipboard(): def copy_wsl(text): text = _stringifyText(text) # Converts non-str values to str. p = subprocess.Popen(["clip.exe"], stdin=subprocess.PIPE, close_fds=True) p.communicate(input=text.encode(ENCODING)) def paste_wsl(): p = subprocess.Popen( ["powershell.exe", "-command", "Get-Clipboard"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, ) stdout, stderr = p.communicate() # WSL appends "\r\n" to the contents. return stdout[:-2].decode(ENCODING) return copy_wsl, paste_wsl # Automatic detection of clipboard mechanisms # and importing is done in determine_clipboard(): def determine_clipboard(): """ Determine the OS/platform and set the copy() and paste() functions accordingly. 
""" global Foundation, AppKit, qtpy, PyQt4, PyQt5 # Setup for the CYGWIN platform: if ( "cygwin" in platform.system().lower() ): # Cygwin has a variety of values returned by platform.system(), # such as 'CYGWIN_NT-6.1' # FIXME: pyperclip currently does not support Cygwin, # see https://github.com/asweigart/pyperclip/issues/55 if os.path.exists("/dev/clipboard"): warnings.warn( "Pyperclip's support for Cygwin is not perfect," "see https://github.com/asweigart/pyperclip/issues/55" ) return init_dev_clipboard_clipboard() # Setup for the WINDOWS platform: elif os.name == "nt" or platform.system() == "Windows": return init_windows_clipboard() if platform.system() == "Linux": with open("/proc/version") as f: if "Microsoft" in f.read(): return init_wsl_clipboard() # Setup for the MAC OS X platform: if os.name == "mac" or platform.system() == "Darwin": try: import AppKit import Foundation # check if pyobjc is installed except ImportError: return init_osx_pbcopy_clipboard() else: return init_osx_pyobjc_clipboard() # Setup for the LINUX platform: if HAS_DISPLAY: if _executable_exists("xsel"): return init_xsel_clipboard() if _executable_exists("xclip"): return init_xclip_clipboard() if _executable_exists("klipper") and _executable_exists("qdbus"): return init_klipper_clipboard() try: # qtpy is a small abstraction layer that lets you write applications # using a single api call to either PyQt or PySide. # https://pypi.python.org/project/QtPy import qtpy # check if qtpy is installed except ImportError: # If qtpy isn't installed, fall back on importing PyQt4. try: import PyQt5 # check if PyQt5 is installed except ImportError: try: import PyQt4 # check if PyQt4 is installed except ImportError: pass # We want to fail fast for all non-ImportError exceptions. else: return init_qt_clipboard() else: return init_qt_clipboard() else: return init_qt_clipboard() return init_no_clipboard() def set_clipboard(clipboard): """ Explicitly sets the clipboard mechanism. The "clipboard mechanism" is how the copy() and paste() functions interact with the operating system to implement the copy/paste feature. The clipboard parameter must be one of: - pbcopy - pbobjc (default on Mac OS X) - qt - xclip - xsel - klipper - windows (default on Windows) - no (this is what is set when no clipboard mechanism can be found) """ global copy, paste clipboard_types = { "pbcopy": init_osx_pbcopy_clipboard, "pyobjc": init_osx_pyobjc_clipboard, "qt": init_qt_clipboard, # TODO - split this into 'qtpy', 'pyqt4', and 'pyqt5' "xclip": init_xclip_clipboard, "xsel": init_xsel_clipboard, "klipper": init_klipper_clipboard, "windows": init_windows_clipboard, "no": init_no_clipboard, } if clipboard not in clipboard_types: allowed_clipboard_types = [repr(_) for _ in clipboard_types.keys()] raise ValueError( f"Argument must be one of {', '.join(allowed_clipboard_types)}" ) # Sets pyperclip's copy() and paste() functions: copy, paste = clipboard_types[clipboard]() def lazy_load_stub_copy(text): """ A stub function for copy(), which will load the real copy() function when called so that the real copy() function is used for later calls. This allows users to import pyperclip without having determine_clipboard() automatically run, which will automatically select a clipboard mechanism. This could be a problem if it selects, say, the memory-heavy PyQt4 module but the user was just going to immediately call set_clipboard() to use a different clipboard mechanism. 
The lazy loading this stub function implements gives the user a chance to call set_clipboard() to pick another clipboard mechanism. Or, if the user simply calls copy() or paste() without calling set_clipboard() first, will fall back on whatever clipboard mechanism that determine_clipboard() automatically chooses. """ global copy, paste copy, paste = determine_clipboard() return copy(text) def lazy_load_stub_paste(): """ A stub function for paste(), which will load the real paste() function when called so that the real paste() function is used for later calls. This allows users to import pyperclip without having determine_clipboard() automatically run, which will automatically select a clipboard mechanism. This could be a problem if it selects, say, the memory-heavy PyQt4 module but the user was just going to immediately call set_clipboard() to use a different clipboard mechanism. The lazy loading this stub function implements gives the user a chance to call set_clipboard() to pick another clipboard mechanism. Or, if the user simply calls copy() or paste() without calling set_clipboard() first, will fall back on whatever clipboard mechanism that determine_clipboard() automatically chooses. """ global copy, paste copy, paste = determine_clipboard() return paste() def is_available() -> bool: return copy != lazy_load_stub_copy and paste != lazy_load_stub_paste # Initially, copy() and paste() are set to lazy loading wrappers which will # set `copy` and `paste` to real functions the first time they're used, unless # set_clipboard() or determine_clipboard() is called first. copy, paste = lazy_load_stub_copy, lazy_load_stub_paste __all__ = ["copy", "paste", "set_clipboard", "determine_clipboard"] # pandas aliases clipboard_get = paste clipboard_set = copy
rs2/pandas
pandas/tests/groupby/test_missing.py
pandas/io/clipboard/__init__.py
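A short usage sketch of the clipboard machinery above, mirroring the module docstring. It assumes a working clipboard mechanism exists on the host (otherwise the first copy() raises PyperclipException); the explicit set_clipboard call is optional and shown only for illustration:

import pandas.io.clipboard as clipboard

# copy()/paste() start out as lazy stubs; the first call runs
# determine_clipboard() and swaps in the real implementations.
clipboard.copy("The text to be copied to the clipboard.")
spam = clipboard.paste()

# After that first call the lazy stubs have been replaced, so
# is_available() reports True (assuming a mechanism was found).
assert clipboard.is_available()

# A mechanism can also be chosen explicitly, e.g. on Linux with xclip:
# clipboard.set_clipboard("xclip")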
""" miscellaneous sorting / groupby utilities """ from collections import defaultdict from typing import ( TYPE_CHECKING, Callable, DefaultDict, Iterable, List, Optional, Tuple, Union, ) import numpy as np from pandas._libs import algos, hashtable, lib from pandas._libs.hashtable import unique_label_indices from pandas._typing import IndexKeyFunc from pandas.core.dtypes.common import ( ensure_int64, ensure_platform_int, is_extension_array_dtype, ) from pandas.core.dtypes.generic import ABCMultiIndex from pandas.core.dtypes.missing import isna import pandas.core.algorithms as algorithms from pandas.core.construction import extract_array if TYPE_CHECKING: from pandas.core.indexes.base import Index _INT64_MAX = np.iinfo(np.int64).max def get_indexer_indexer( target: "Index", level: Union[str, int, List[str], List[int]], ascending: bool, kind: str, na_position: str, sort_remaining: bool, key: IndexKeyFunc, ) -> Optional[np.array]: """ Helper method that return the indexer according to input parameters for the sort_index method of DataFrame and Series. Parameters ---------- target : Index level : int or level name or list of ints or list of level names ascending : bool or list of bools, default True kind : {'quicksort', 'mergesort', 'heapsort'}, default 'quicksort' na_position : {'first', 'last'}, default 'last' sort_remaining : bool, default True key : callable, optional Returns ------- Optional[ndarray] The indexer for the new index. """ target = ensure_key_mapped(target, key, levels=level) target = target._sort_levels_monotonic() if level is not None: _, indexer = target.sortlevel( level, ascending=ascending, sort_remaining=sort_remaining ) elif isinstance(target, ABCMultiIndex): indexer = lexsort_indexer( target._get_codes_for_sorting(), orders=ascending, na_position=na_position ) else: # Check monotonic-ness before sort an index (GH 11080) if (ascending and target.is_monotonic_increasing) or ( not ascending and target.is_monotonic_decreasing ): return None indexer = nargsort( target, kind=kind, ascending=ascending, na_position=na_position ) return indexer def get_group_index(labels, shape, sort: bool, xnull: bool): """ For the particular label_list, gets the offsets into the hypothetical list representing the totally ordered cartesian product of all possible label combinations, *as long as* this space fits within int64 bounds; otherwise, though group indices identify unique combinations of labels, they cannot be deconstructed. - If `sort`, rank of returned ids preserve lexical ranks of labels. i.e. returned id's can be used to do lexical sort on labels; - If `xnull` nulls (-1 labels) are passed through. Parameters ---------- labels : sequence of arrays Integers identifying levels at each location shape : sequence of ints Number of unique levels at each location sort : bool If the ranks of returned ids should match lexical ranks of labels xnull : bool If true nulls are excluded. i.e. -1 values in the labels are passed through. Returns ------- An array of type int64 where two elements are equal if their corresponding labels are equal at all location. Notes ----- The length of `labels` and `shape` must be identical. 
""" def _int64_cut_off(shape) -> int: acc = 1 for i, mul in enumerate(shape): acc *= int(mul) if not acc < _INT64_MAX: return i return len(shape) def maybe_lift(lab, size): # promote nan values (assigned -1 label in lab array) # so that all output values are non-negative return (lab + 1, size + 1) if (lab == -1).any() else (lab, size) labels = map(ensure_int64, labels) if not xnull: labels, shape = map(list, zip(*map(maybe_lift, labels, shape))) labels = list(labels) shape = list(shape) # Iteratively process all the labels in chunks sized so less # than _INT64_MAX unique int ids will be required for each chunk while True: # how many levels can be done without overflow: nlev = _int64_cut_off(shape) # compute flat ids for the first `nlev` levels stride = np.prod(shape[1:nlev], dtype="i8") out = stride * labels[0].astype("i8", subok=False, copy=False) for i in range(1, nlev): if shape[i] == 0: stride = 0 else: stride //= shape[i] out += labels[i] * stride if xnull: # exclude nulls mask = labels[0] == -1 for lab in labels[1:nlev]: mask |= lab == -1 out[mask] = -1 if nlev == len(shape): # all levels done! break # compress what has been done so far in order to avoid overflow # to retain lexical ranks, obs_ids should be sorted comp_ids, obs_ids = compress_group_index(out, sort=sort) labels = [comp_ids] + labels[nlev:] shape = [len(obs_ids)] + shape[nlev:] return out def get_compressed_ids(labels, sizes): """ Group_index is offsets into cartesian product of all possible labels. This space can be huge, so this function compresses it, by computing offsets (comp_ids) into the list of unique labels (obs_group_ids). Parameters ---------- labels : list of label arrays sizes : list of size of the levels Returns ------- tuple of (comp_ids, obs_group_ids) """ ids = get_group_index(labels, sizes, sort=True, xnull=False) return compress_group_index(ids, sort=True) def is_int64_overflow_possible(shape) -> bool: the_prod = 1 for x in shape: the_prod *= int(x) return the_prod >= _INT64_MAX def decons_group_index(comp_labels, shape): # reconstruct labels if is_int64_overflow_possible(shape): # at some point group indices are factorized, # and may not be deconstructed here! wrong path! raise ValueError("cannot deconstruct factorized group indices!") label_list = [] factor = 1 y = 0 x = comp_labels for i in reversed(range(len(shape))): labels = (x - y) % (factor * shape[i]) // factor np.putmask(labels, comp_labels < 0, -1) label_list.append(labels) y = labels * factor factor *= shape[i] return label_list[::-1] def decons_obs_group_ids(comp_ids, obs_ids, shape, labels, xnull: bool): """ Reconstruct labels from observed group ids. Parameters ---------- xnull : bool If nulls are excluded; i.e. -1 labels are passed through. """ if not xnull: lift = np.fromiter(((a == -1).any() for a in labels), dtype="i8") shape = np.asarray(shape, dtype="i8") + lift if not is_int64_overflow_possible(shape): # obs ids are deconstructable! take the fast route! 
out = decons_group_index(obs_ids, shape) return out if xnull or not lift.any() else [x - y for x, y in zip(out, lift)] i = unique_label_indices(comp_ids) i8copy = lambda a: a.astype("i8", subok=False, copy=True) return [i8copy(lab[i]) for lab in labels] def indexer_from_factorized(labels, shape, compress: bool = True): ids = get_group_index(labels, shape, sort=True, xnull=False) if not compress: ngroups = (ids.size and ids.max()) + 1 else: ids, obs = compress_group_index(ids, sort=True) ngroups = len(obs) return get_group_index_sorter(ids, ngroups) def lexsort_indexer( keys, orders=None, na_position: str = "last", key: Optional[Callable] = None ): """ Performs lexical sorting on a set of keys Parameters ---------- keys : sequence of arrays Sequence of ndarrays to be sorted by the indexer orders : boolean or list of booleans, optional Determines the sorting order for each element in keys. If a list, it must be the same length as keys. This determines whether the corresponding element in keys should be sorted in ascending (True) or descending (False) order. if bool, applied to all elements as above. if None, defaults to True. na_position : {'first', 'last'}, default 'last' Determines placement of NA elements in the sorted list ("last" or "first") key : Callable, optional Callable key function applied to every element in keys before sorting .. versionadded:: 1.0.0 """ from pandas.core.arrays import Categorical labels = [] shape = [] if isinstance(orders, bool): orders = [orders] * len(keys) elif orders is None: orders = [True] * len(keys) keys = [ensure_key_mapped(k, key) for k in keys] for k, order in zip(keys, orders): cat = Categorical(k, ordered=True) if na_position not in ["last", "first"]: raise ValueError(f"invalid na_position: {na_position}") n = len(cat.categories) codes = cat.codes.copy() mask = cat.codes == -1 if order: # ascending if na_position == "last": codes = np.where(mask, n, codes) elif na_position == "first": codes += 1 else: # not order means descending if na_position == "last": codes = np.where(mask, n, n - codes - 1) elif na_position == "first": codes = np.where(mask, 0, n - codes) if mask.any(): n += 1 shape.append(n) labels.append(codes) return indexer_from_factorized(labels, shape) def nargsort( items, kind: str = "quicksort", ascending: bool = True, na_position: str = "last", key: Optional[Callable] = None, ): """ Intended to be a drop-in replacement for np.argsort which handles NaNs. Adds ascending, na_position, and key parameters. 
(GH #6399, #5231, #27237) Parameters ---------- kind : str, default 'quicksort' ascending : bool, default True na_position : {'first', 'last'}, default 'last' key : Optional[Callable], default None """ if key is not None: items = ensure_key_mapped(items, key) return nargsort( items, kind=kind, ascending=ascending, na_position=na_position, key=None ) items = extract_array(items) mask = np.asarray(isna(items)) if is_extension_array_dtype(items): items = items._values_for_argsort() else: items = np.asanyarray(items) idx = np.arange(len(items)) non_nans = items[~mask] non_nan_idx = idx[~mask] nan_idx = np.nonzero(mask)[0] if not ascending: non_nans = non_nans[::-1] non_nan_idx = non_nan_idx[::-1] indexer = non_nan_idx[non_nans.argsort(kind=kind)] if not ascending: indexer = indexer[::-1] # Finally, place the NaNs at the end or the beginning according to # na_position if na_position == "last": indexer = np.concatenate([indexer, nan_idx]) elif na_position == "first": indexer = np.concatenate([nan_idx, indexer]) else: raise ValueError(f"invalid na_position: {na_position}") return indexer def nargminmax(values, method: str): """ Implementation of np.argmin/argmax but for ExtensionArray and which handles missing values. Parameters ---------- values : ExtensionArray method : {"argmax", "argmin"} Returns ------- int """ assert method in {"argmax", "argmin"} func = np.argmax if method == "argmax" else np.argmin mask = np.asarray(isna(values)) values = values._values_for_argsort() idx = np.arange(len(values)) non_nans = values[~mask] non_nan_idx = idx[~mask] return non_nan_idx[func(non_nans)] def ensure_key_mapped_multiindex(index, key: Callable, level=None): """ Returns a new MultiIndex in which key has been applied to all levels specified in level (or all levels if level is None). Used for key sorting for MultiIndex. Parameters ---------- index : MultiIndex Index to which to apply the key function on the specified levels. key : Callable Function that takes an Index and returns an Index of the same shape. This key is applied to each level separately. The name of the level can be used to distinguish different levels for application. level : list-like, int or str, default None Level or list of levels to apply the key function to. If None, key function is applied to all levels. Other levels are left unchanged. Returns ------- labels : MultiIndex Resulting MultiIndex with modified levels. """ from pandas.core.indexes.api import MultiIndex if level is not None: if isinstance(level, (str, int)): sort_levels = [level] else: sort_levels = level sort_levels = [index._get_level_number(lev) for lev in sort_levels] else: sort_levels = list(range(index.nlevels)) # satisfies mypy mapped = [ ensure_key_mapped(index._get_level_values(level), key) if level in sort_levels else index._get_level_values(level) for level in range(index.nlevels) ] labels = MultiIndex.from_arrays(mapped) return labels def ensure_key_mapped(values, key: Optional[Callable], levels=None): """ Applies a callable key function to the values function and checks that the resulting value has the same shape. Can be called on Index subclasses, Series, DataFrames, or ndarrays. Parameters ---------- values : Series, DataFrame, Index subclass, or ndarray key : Optional[Callable], key to be called on the values array levels : Optional[List], if values is a MultiIndex, list of levels to apply the key to. 
""" from pandas.core.indexes.api import Index # noqa:F811 if not key: return values if isinstance(values, ABCMultiIndex): return ensure_key_mapped_multiindex(values, key, level=levels) result = key(values.copy()) if len(result) != len(values): raise ValueError( "User-provided `key` function must not change the shape of the array." ) try: if isinstance( values, Index ): # convert to a new Index subclass, not necessarily the same result = Index(result) else: type_of_values = type(values) result = type_of_values(result) # try to revert to original type otherwise except TypeError: raise TypeError( f"User-provided `key` function returned an invalid type {type(result)} \ which could not be converted to {type(values)}." ) return result def get_flattened_list( comp_ids: np.ndarray, ngroups: int, levels: Iterable["Index"], labels: Iterable[np.ndarray], ) -> List[Tuple]: """Map compressed group id -> key tuple.""" comp_ids = comp_ids.astype(np.int64, copy=False) arrays: DefaultDict[int, List[int]] = defaultdict(list) for labs, level in zip(labels, levels): table = hashtable.Int64HashTable(ngroups) table.map(comp_ids, labs.astype(np.int64, copy=False)) for i in range(ngroups): arrays[i].append(level[table.get_item(i)]) return [tuple(array) for array in arrays.values()] def get_indexer_dict(label_list, keys): """ Returns ------- dict Labels mapped to indexers. """ shape = [len(x) for x in keys] group_index = get_group_index(label_list, shape, sort=True, xnull=True) ngroups = ( ((group_index.size and group_index.max()) + 1) if is_int64_overflow_possible(shape) else np.prod(shape, dtype="i8") ) sorter = get_group_index_sorter(group_index, ngroups) sorted_labels = [lab.take(sorter) for lab in label_list] group_index = group_index.take(sorter) return lib.indices_fast(sorter, group_index, keys, sorted_labels) # ---------------------------------------------------------------------- # sorting levels...cleverly? def get_group_index_sorter(group_index, ngroups: int): """ algos.groupsort_indexer implements `counting sort` and it is at least O(ngroups), where ngroups = prod(shape) shape = map(len, keys) that is, linear in the number of combinations (cartesian product) of unique values of groupby keys. This can be huge when doing multi-key groupby. np.argsort(kind='mergesort') is O(count x log(count)) where count is the length of the data-frame; Both algorithms are `stable` sort and that is necessary for correctness of groupby operations. e.g. consider: df.groupby(key)[col].transform('first') """ count = len(group_index) alpha = 0.0 # taking complexities literally; there may be beta = 1.0 # some room for fine-tuning these parameters do_groupsort = count > 0 and ((alpha + beta * ngroups) < (count * np.log(count))) if do_groupsort: sorter, _ = algos.groupsort_indexer(ensure_int64(group_index), ngroups) return ensure_platform_int(sorter) else: return group_index.argsort(kind="mergesort") def compress_group_index(group_index, sort: bool = True): """ Group_index is offsets into cartesian product of all possible labels. This space can be huge, so this function compresses it, by computing offsets (comp_ids) into the list of unique labels (obs_group_ids). 
""" size_hint = min(len(group_index), hashtable.SIZE_HINT_LIMIT) table = hashtable.Int64HashTable(size_hint) group_index = ensure_int64(group_index) # note, group labels come out ascending (ie, 1,2,3 etc) comp_ids, obs_group_ids = table.get_labels_groupby(group_index) if sort and len(obs_group_ids) > 0: obs_group_ids, comp_ids = _reorder_by_uniques(obs_group_ids, comp_ids) return comp_ids, obs_group_ids def _reorder_by_uniques(uniques, labels): # sorter is index where elements ought to go sorter = uniques.argsort() # reverse_indexer is where elements came from reverse_indexer = np.empty(len(sorter), dtype=np.int64) reverse_indexer.put(sorter, np.arange(len(sorter))) mask = labels < 0 # move labels to right locations (ie, unsort ascending labels) labels = algorithms.take_nd(reverse_indexer, labels, allow_fill=False) np.putmask(labels, mask, -1) # sort observed ids uniques = algorithms.take_nd(uniques, sorter, allow_fill=False) return uniques, labels
rs2/pandas
pandas/tests/groupby/test_missing.py
pandas/core/sorting.py
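A worked example of the cartesian-product encoding described in the get_group_index docstring above; the label arrays are invented for illustration:

import numpy as np

from pandas.core.sorting import compress_group_index, get_group_index

# Two levels with 2 and 3 unique values: the flat id is label0 * 3 + label1.
labels = [
    np.array([0, 0, 1, 1], dtype=np.int64),
    np.array([0, 2, 0, 2], dtype=np.int64),
]
shape = [2, 3]

ids = get_group_index(labels, shape, sort=True, xnull=False)
# ids -> array([0, 2, 3, 5]): offsets into the 2 x 3 product space.

# Only 4 of the 6 possible combinations occur, so the ids compress
# down to dense codes plus the list of observed group ids.
comp_ids, obs_ids = compress_group_index(ids, sort=True)
# comp_ids -> array([0, 1, 2, 3]); obs_ids -> array([0, 2, 3, 5])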
"""Config flow for Mobile App.""" import uuid from homeassistant import config_entries from homeassistant.components import person from homeassistant.helpers import entity_registry from .const import ATTR_APP_ID, ATTR_DEVICE_ID, ATTR_DEVICE_NAME, CONF_USER_ID, DOMAIN @config_entries.HANDLERS.register(DOMAIN) class MobileAppFlowHandler(config_entries.ConfigFlow): """Handle a Mobile App config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" placeholders = { "apps_url": "https://www.home-assistant.io/integrations/mobile_app/#apps" } return self.async_abort( reason="install_app", description_placeholders=placeholders ) async def async_step_registration(self, user_input=None): """Handle a flow initialized during registration.""" if ATTR_DEVICE_ID in user_input: # Unique ID is combi of app + device ID. await self.async_set_unique_id( f"{user_input[ATTR_APP_ID]}-{user_input[ATTR_DEVICE_ID]}" ) else: user_input[ATTR_DEVICE_ID] = str(uuid.uuid4()).replace("-", "") # Register device tracker entity and add to person registering app ent_reg = await entity_registry.async_get_registry(self.hass) devt_entry = ent_reg.async_get_or_create( "device_tracker", DOMAIN, user_input[ATTR_DEVICE_ID], suggested_object_id=user_input[ATTR_DEVICE_NAME], ) await person.async_add_user_device_tracker( self.hass, user_input[CONF_USER_ID], devt_entry.entity_id ) return self.async_create_entry( title=user_input[ATTR_DEVICE_NAME], data=user_input )
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/mobile_app/config_flow.py
"""Config flow for Elexa Guardian integration.""" from aioguardian import Client from aioguardian.errors import GuardianError import voluptuous as vol from homeassistant import config_entries, core from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT from homeassistant.core import callback from .const import CONF_UID, DOMAIN, LOGGER # pylint:disable=unused-import DATA_SCHEMA = vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PORT, default=7777): int} ) UNIQUE_ID = "guardian_{0}" @callback def async_get_pin_from_discovery_hostname(hostname): """Get the device's 4-digit PIN from its zeroconf-discovered hostname.""" return hostname.split(".")[0].split("-")[1] @callback def async_get_pin_from_uid(uid): """Get the device's 4-digit PIN from its UID.""" return uid[-4:] async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ async with Client(data[CONF_IP_ADDRESS]) as client: ping_data = await client.system.ping() return { CONF_UID: ping_data["data"]["uid"], } class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Elexa Guardian.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize.""" self.discovery_info = {} async def _async_set_unique_id(self, pin): """Set the config entry's unique ID (based on the device's 4-digit PIN).""" await self.async_set_unique_id(UNIQUE_ID.format(pin)) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle configuration via the UI.""" if user_input is None: return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={} ) try: info = await validate_input(self.hass, user_input) except GuardianError as err: LOGGER.error("Error while connecting to unit: %s", err) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={CONF_IP_ADDRESS: "cannot_connect"}, ) pin = async_get_pin_from_uid(info[CONF_UID]) await self._async_set_unique_id(pin) return self.async_create_entry( title=info[CONF_UID], data={CONF_UID: info["uid"], **user_input} ) async def async_step_zeroconf(self, discovery_info): """Handle the configuration via zeroconf.""" if discovery_info is None: return self.async_abort(reason="connection_error") pin = async_get_pin_from_discovery_hostname(discovery_info["hostname"]) await self._async_set_unique_id(pin) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context[CONF_IP_ADDRESS] = discovery_info["host"] if any( discovery_info["host"] == flow["context"][CONF_IP_ADDRESS] for flow in self._async_in_progress() ): return self.async_abort(reason="already_in_progress") self.discovery_info = { CONF_IP_ADDRESS: discovery_info["host"], CONF_PORT: discovery_info["port"], } return await self.async_step_zeroconf_confirm() async def async_step_zeroconf_confirm(self, user_input=None): """Finish the configuration via zeroconf.""" if user_input is None: return self.async_show_form(step_id="zeroconf_confirm") return await self.async_step_user(self.discovery_info)
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/guardian/config_flow.py
"""Config flow for Monoprice 6-Zone Amplifier integration.""" import logging from pymonoprice import get_async_monoprice from serial import SerialException import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.const import CONF_PORT from .const import ( CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, CONF_SOURCES, ) from .const import DOMAIN # pylint:disable=unused-import _LOGGER = logging.getLogger(__name__) SOURCES = [ CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, ] OPTIONS_FOR_DATA = {vol.Optional(source): str for source in SOURCES} DATA_SCHEMA = vol.Schema({vol.Required(CONF_PORT): str, **OPTIONS_FOR_DATA}) @core.callback def _sources_from_config(data): sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } return { index: name.strip() for index, name in sources_config.items() if (name is not None and name.strip() != "") } async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ try: await get_async_monoprice(data[CONF_PORT], hass.loop) except SerialException: _LOGGER.error("Error connecting to Monoprice controller") raise CannotConnect sources = _sources_from_config(data) # Return info that you want to store in the config entry. return {CONF_PORT: data[CONF_PORT], CONF_SOURCES: sources} class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Monoprice 6-Zone Amplifier.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: info = await validate_input(self.hass, user_input) return self.async_create_entry(title=user_input[CONF_PORT], data=info) except CannotConnect: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) @staticmethod @core.callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return MonopriceOptionsFlowHandler(config_entry) @core.callback def _key_for_source(index, source, previous_sources): if str(index) in previous_sources: key = vol.Optional( source, description={"suggested_value": previous_sources[str(index)]} ) else: key = vol.Optional(source) return key class MonopriceOptionsFlowHandler(config_entries.OptionsFlow): """Handle a Monoprice options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry @core.callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: previous = self.config_entry.options[CONF_SOURCES] else: previous = self.config_entry.data[CONF_SOURCES] return previous async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry( title="", data={CONF_SOURCES: _sources_from_config(user_input)} ) previous_sources = self._previous_sources() options = { _key_for_source(idx + 1, source, previous_sources): str for idx, source in enumerate(SOURCES) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options),) class CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect."""
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/monoprice/config_flow.py
"""Support for Gogogate2 garage Doors.""" import logging from typing import Callable, List, Optional from gogogate2_api.common import Door, DoorStatus, get_configured_doors, get_door_by_id import voluptuous as vol from homeassistant.components.cover import ( DEVICE_CLASS_GARAGE, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .common import ( GogoGateDataUpdateCoordinator, cover_unique_id, get_data_update_coordinator, ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) COVER_SCHEMA = vol.Schema( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, } ) async def async_setup_platform( hass: HomeAssistant, config: dict, add_entities: Callable, discovery_info=None ) -> None: """Convert old style file configs to new style configs.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) ) async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable[[List[Entity], Optional[bool]], None], ) -> None: """Set up the config entry.""" data_update_coordinator = get_data_update_coordinator(hass, config_entry) async_add_entities( [ Gogogate2Cover(config_entry, data_update_coordinator, door) for door in get_configured_doors(data_update_coordinator.data) ] ) class Gogogate2Cover(CoverEntity): """Cover entity for goggate2.""" def __init__( self, config_entry: ConfigEntry, data_update_coordinator: GogoGateDataUpdateCoordinator, door: Door, ) -> None: """Initialize the object.""" self._config_entry = config_entry self._data_update_coordinator = data_update_coordinator self._door = door self._api = data_update_coordinator.api self._unique_id = cover_unique_id(config_entry, door) self._is_available = True @property def available(self) -> bool: """Return True if entity is available.""" return self._is_available @property def should_poll(self) -> bool: """Return False as the data manager handles dispatching data.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the door.""" return self._door.name @property def is_closed(self): """Return true if cover is closed, else False.""" if self._door.status == DoorStatus.OPENED: return False if self._door.status == DoorStatus.CLOSED: return True return None @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_GARAGE @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE async def async_open_cover(self, **kwargs): """Open the door.""" await self.hass.async_add_executor_job(self._api.open_door, self._door.door_id) async def async_close_cover(self, **kwargs): """Close the door.""" await self.hass.async_add_executor_job(self._api.close_door, self._door.door_id) @property def state_attributes(self): """Return the state attributes.""" attrs = super().state_attributes attrs["door_id"] = self._door.door_id return attrs @callback def async_on_data_updated(self) -> None: """Receive data from data dispatcher.""" if not 
self._data_update_coordinator.last_update_success: self._is_available = False self.async_write_ha_state() return door = get_door_by_id(self._door.door_id, self._data_update_coordinator.data) # Set the state. self._door = door self._is_available = True self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Register update dispatcher.""" self.async_on_remove( self._data_update_coordinator.async_add_listener(self.async_on_data_updated) )
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/gogogate2/cover.py
"""Allows to configure a switch using RPi GPIO.""" import logging import voluptuous as vol from homeassistant.components import rpi_gpio from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity _LOGGER = logging.getLogger(__name__) CONF_PULL_MODE = "pull_mode" CONF_PORTS = "ports" CONF_INVERT_LOGIC = "invert_logic" DEFAULT_INVERT_LOGIC = False _SWITCHES_SCHEMA = vol.Schema({cv.positive_int: cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PORTS): _SWITCHES_SCHEMA, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Raspberry PI GPIO devices.""" invert_logic = config.get(CONF_INVERT_LOGIC) switches = [] ports = config.get(CONF_PORTS) for port, name in ports.items(): switches.append(RPiGPIOSwitch(name, port, invert_logic)) add_entities(switches) class RPiGPIOSwitch(ToggleEntity): """Representation of a Raspberry Pi GPIO.""" def __init__(self, name, port, invert_logic): """Initialize the pin.""" self._name = name or DEVICE_DEFAULT_NAME self._port = port self._invert_logic = invert_logic self._state = False rpi_gpio.setup_output(self._port) rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if device is on.""" return self._state def turn_on(self, **kwargs): """Turn the device on.""" rpi_gpio.write_output(self._port, 0 if self._invert_logic else 1) self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) self._state = False self.schedule_update_ha_state()
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
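# The expected change_rate in setup_method relies on the mock clock advancing
# one minute per sample, so `count` samples span 60 * (count - 1) seconds. A
# quick standalone check of that arithmetic, values copied from setup_method:
values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6]
change = round(values[-1] - values[0], 2)  # 6 - 17 == -11
elapsed_seconds = 60 * (len(values) - 1)  # one sample per minute -> 480 s
assert round(change / elapsed_seconds, 2) == -0.02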
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/rpi_gpio/switch.py
"""Support for the OpenWeatherMap (OWM) service.""" from datetime import timedelta import logging from pyowm import OWM from pyowm.exceptions.api_call_error import APICallError import voluptuous as vol from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, PLATFORM_SCHEMA, WeatherEntity, ) from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME, PRESSURE_HPA, PRESSURE_INHG, STATE_UNKNOWN, TEMP_CELSIUS, ) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from homeassistant.util.pressure import convert as convert_pressure _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by OpenWeatherMap" FORECAST_MODE = ["hourly", "daily", "freedaily"] DEFAULT_NAME = "OpenWeatherMap" MIN_TIME_BETWEEN_FORECAST_UPDATES = timedelta(minutes=30) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) CONDITION_CLASSES = { "cloudy": [803, 804], "fog": [701, 741], "hail": [906], "lightning": [210, 211, 212, 221], "lightning-rainy": [200, 201, 202, 230, 231, 232], "partlycloudy": [801, 802], "pouring": [504, 314, 502, 503, 522], "rainy": [300, 301, 302, 310, 311, 312, 313, 500, 501, 520, 521], "snowy": [600, 601, 602, 611, 612, 620, 621, 622], "snowy-rainy": [511, 615, 616], "sunny": [800], "windy": [905, 951, 952, 953, 954, 955, 956, 957], "windy-variant": [958, 959, 960, 961], "exceptional": [711, 721, 731, 751, 761, 762, 771, 900, 901, 962, 903, 904], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_MODE, default="hourly"): vol.In(FORECAST_MODE), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the OpenWeatherMap weather platform.""" longitude = config.get(CONF_LONGITUDE, round(hass.config.longitude, 5)) latitude = config.get(CONF_LATITUDE, round(hass.config.latitude, 5)) name = config.get(CONF_NAME) mode = config.get(CONF_MODE) try: owm = OWM(config.get(CONF_API_KEY)) except APICallError: _LOGGER.error("Error while connecting to OpenWeatherMap") return False data = WeatherData(owm, latitude, longitude, mode) add_entities( [OpenWeatherMapWeather(name, data, hass.config.units.temperature_unit, mode)], True, ) class OpenWeatherMapWeather(WeatherEntity): """Implementation of an OpenWeatherMap sensor.""" def __init__(self, name, owm, temperature_unit, mode): """Initialize the sensor.""" self._name = name self._owm = owm self._temperature_unit = temperature_unit self._mode = mode self.data = None self.forecast_data = None @property def name(self): """Return the name of the sensor.""" return self._name @property def condition(self): """Return the current condition.""" try: return [ k for k, v in CONDITION_CLASSES.items() if self.data.get_weather_code() in v ][0] except IndexError: return STATE_UNKNOWN @property def temperature(self): """Return the temperature.""" return self.data.get_temperature("celsius").get("temp") @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def pressure(self): """Return the pressure.""" pressure = self.data.get_pressure().get("press") if self.hass.config.units.name == "imperial": return round(convert_pressure(pressure, PRESSURE_HPA, PRESSURE_INHG), 2) 
        return pressure

    @property
    def humidity(self):
        """Return the humidity."""
        return self.data.get_humidity()

    @property
    def wind_speed(self):
        """Return the wind speed."""
        if self.hass.config.units.name == "imperial":
            return round(self.data.get_wind().get("speed") * 2.24, 2)

        return round(self.data.get_wind().get("speed") * 3.6, 2)

    @property
    def wind_bearing(self):
        """Return the wind bearing."""
        return self.data.get_wind().get("deg")

    @property
    def attribution(self):
        """Return the attribution."""
        return ATTRIBUTION

    @property
    def forecast(self):
        """Return the forecast array."""
        data = []

        def calc_precipitation(rain, snow):
            """Calculate the precipitation."""
            rain_value = 0 if rain is None else rain
            snow_value = 0 if snow is None else snow
            if round(rain_value + snow_value, 1) == 0:
                return None
            return round(rain_value + snow_value, 1)

        if self._mode == "freedaily":
            weather = self.forecast_data.get_weathers()[::8]
        else:
            weather = self.forecast_data.get_weathers()

        for entry in weather:
            if self._mode == "daily":
                data.append(
                    {
                        ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000,
                        ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get("day"),
                        ATTR_FORECAST_TEMP_LOW: entry.get_temperature("celsius").get(
                            "night"
                        ),
                        ATTR_FORECAST_PRECIPITATION: calc_precipitation(
                            entry.get_rain().get("all"), entry.get_snow().get("all")
                        ),
                        ATTR_FORECAST_WIND_SPEED: entry.get_wind().get("speed"),
                        ATTR_FORECAST_WIND_BEARING: entry.get_wind().get("deg"),
                        ATTR_FORECAST_CONDITION: [
                            k
                            for k, v in CONDITION_CLASSES.items()
                            if entry.get_weather_code() in v
                        ][0],
                    }
                )
            else:
                data.append(
                    {
                        ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000,
                        ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get(
                            "temp"
                        ),
                        ATTR_FORECAST_PRECIPITATION: (
                            round(entry.get_rain().get("3h"), 1)
                            if entry.get_rain().get("3h") is not None
                            and (round(entry.get_rain().get("3h"), 1) > 0)
                            else None
                        ),
                        ATTR_FORECAST_CONDITION: [
                            k
                            for k, v in CONDITION_CLASSES.items()
                            if entry.get_weather_code() in v
                        ][0],
                    }
                )

        return data

    def update(self):
        """Get the latest data from OWM and update the states."""
        try:
            self._owm.update()
            self._owm.update_forecast()
        except APICallError:
            _LOGGER.error("Exception when calling OWM web API to update data")
            return

        self.data = self._owm.data
        self.forecast_data = self._owm.forecast_data


class WeatherData:
    """Get the latest data from OpenWeatherMap."""

    def __init__(self, owm, latitude, longitude, mode):
        """Initialize the data object."""
        self._mode = mode
        self.owm = owm
        self.latitude = latitude
        self.longitude = longitude
        self.data = None
        self.forecast_data = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from OpenWeatherMap."""
        obs = self.owm.weather_at_coords(self.latitude, self.longitude)
        if obs is None:
            _LOGGER.warning("Failed to fetch data from OWM")
            return

        self.data = obs.get_weather()

    @Throttle(MIN_TIME_BETWEEN_FORECAST_UPDATES)
    def update_forecast(self):
        """Get the latest forecast from OpenWeatherMap."""
        try:
            if self._mode == "daily":
                fcd = self.owm.daily_forecast_at_coords(
                    self.latitude, self.longitude, 15
                )
            else:
                fcd = self.owm.three_hours_forecast_at_coords(
                    self.latitude, self.longitude
                )
        except APICallError:
            _LOGGER.error("Exception when calling OWM web API to update forecast")
            return

        if fcd is None:
            _LOGGER.warning("Failed to fetch forecast data from OWM")
            return

        self.forecast_data = fcd.get_forecast()
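# The `condition` property and the forecast loop above both invert
# CONDITION_CLASSES: given an OWM weather code, they pick the first condition
# whose code list contains it. A standalone sketch of that lookup; the mapping
# here is a trimmed illustrative copy, not the full table from the module.
TRIMMED_CONDITION_CLASSES = {
    "sunny": [800],
    "partlycloudy": [801, 802],
    "rainy": [300, 500, 501, 520, 521],
}


def condition_for(code):
    """Map an OWM weather code to a condition, mirroring the comprehension."""
    matches = [k for k, v in TRIMMED_CONDITION_CLASSES.items() if code in v]
    return matches[0] if matches else "unknown"


assert condition_for(800) == "sunny"
assert condition_for(501) == "rainy"
assert condition_for(999) == "unknown"  # falls back instead of raising IndexError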
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/openweathermap/weather.py
"""Syslog notification service.""" import logging import syslog import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) CONF_FACILITY = "facility" CONF_OPTION = "option" CONF_PRIORITY = "priority" SYSLOG_FACILITY = { "kernel": "LOG_KERN", "user": "LOG_USER", "mail": "LOG_MAIL", "daemon": "LOG_DAEMON", "auth": "LOG_KERN", "LPR": "LOG_LPR", "news": "LOG_NEWS", "uucp": "LOG_UUCP", "cron": "LOG_CRON", "syslog": "LOG_SYSLOG", "local0": "LOG_LOCAL0", "local1": "LOG_LOCAL1", "local2": "LOG_LOCAL2", "local3": "LOG_LOCAL3", "local4": "LOG_LOCAL4", "local5": "LOG_LOCAL5", "local6": "LOG_LOCAL6", "local7": "LOG_LOCAL7", } SYSLOG_OPTION = { "pid": "LOG_PID", "cons": "LOG_CONS", "ndelay": "LOG_NDELAY", "nowait": "LOG_NOWAIT", "perror": "LOG_PERROR", } SYSLOG_PRIORITY = { 5: "LOG_EMERG", 4: "LOG_ALERT", 3: "LOG_CRIT", 2: "LOG_ERR", 1: "LOG_WARNING", 0: "LOG_NOTICE", -1: "LOG_INFO", -2: "LOG_DEBUG", } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_FACILITY, default="syslog"): vol.In(SYSLOG_FACILITY.keys()), vol.Optional(CONF_OPTION, default="pid"): vol.In(SYSLOG_OPTION.keys()), vol.Optional(CONF_PRIORITY, default=-1): vol.In(SYSLOG_PRIORITY.keys()), } ) def get_service(hass, config, discovery_info=None): """Get the syslog notification service.""" facility = getattr(syslog, SYSLOG_FACILITY[config.get(CONF_FACILITY)]) option = getattr(syslog, SYSLOG_OPTION[config.get(CONF_OPTION)]) priority = getattr(syslog, SYSLOG_PRIORITY[config.get(CONF_PRIORITY)]) return SyslogNotificationService(facility, option, priority) class SyslogNotificationService(BaseNotificationService): """Implement the syslog notification service.""" def __init__(self, facility, option, priority): """Initialize the service.""" self._facility = facility self._option = option self._priority = priority def send_message(self, message="", **kwargs): """Send a message to a user.""" title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) syslog.openlog(title, self._option, self._facility) syslog.syslog(self._priority, message) syslog.closelog()
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/syslog/notify.py
"""Config flow to configure the RainMachine component.""" from regenmaschine import login from regenmaschine.errors import RainMachineError import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL, CONF_SSL, ) from homeassistant.helpers import aiohttp_client from .const import ( # pylint: disable=unused-import CONF_ZONE_RUN_TIME, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DEFAULT_ZONE_RUN, DOMAIN, ) class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a RainMachine config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.data_schema = vol.Schema( { vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, } ) async def _show_form(self, errors=None): """Show the form to the user.""" return self.async_show_form( step_id="user", data_schema=self.data_schema, errors=errors if errors else {}, ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return await self._show_form() await self.async_set_unique_id(user_input[CONF_IP_ADDRESS]) self._abort_if_unique_id_configured() websession = aiohttp_client.async_get_clientsession(self.hass) try: await login( user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD], websession, port=user_input[CONF_PORT], ssl=user_input.get(CONF_SSL, True), ) except RainMachineError: return await self._show_form({CONF_PASSWORD: "invalid_credentials"}) # Unfortunately, RainMachine doesn't provide a way to refresh the # access token without using the IP address and password, so we have to # store it: return self.async_create_entry( title=user_input[CONF_IP_ADDRESS], data={ CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input[CONF_PORT], CONF_SSL: user_input.get(CONF_SSL, True), CONF_SCAN_INTERVAL: user_input.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds() ), CONF_ZONE_RUN_TIME: user_input.get( CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN ), }, )
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/rainmachine/config_flow.py
"""Tracks the latency of a host by sending ICMP echo requests (ping).""" from datetime import timedelta import logging import re import subprocess import sys import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" CONF_PING_COUNT = "count" DEFAULT_NAME = "Ping Binary sensor" DEFAULT_PING_COUNT = 5 DEFAULT_DEVICE_CLASS = "connectivity" SCAN_INTERVAL = timedelta(minutes=5) PING_MATCHER = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)" ) PING_MATCHER_BUSYBOX = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)" ) WIN32_PING_MATCHER = re.compile(r"(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms") PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PING_COUNT, default=DEFAULT_PING_COUNT): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Ping Binary sensor.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) count = config.get(CONF_PING_COUNT) add_entities([PingBinarySensor(name, PingData(host, count))], True) class PingBinarySensor(BinarySensorEntity): """Representation of a Ping Binary sensor.""" def __init__(self, name, ping): """Initialize the Ping Binary sensor.""" self._name = name self.ping = ping @property def name(self): """Return the name of the device.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return DEFAULT_DEVICE_CLASS @property def is_on(self): """Return true if the binary sensor is on.""" return self.ping.available @property def device_state_attributes(self): """Return the state attributes of the ICMP checo request.""" if self.ping.data is not False: return { ATTR_ROUND_TRIP_TIME_AVG: self.ping.data["avg"], ATTR_ROUND_TRIP_TIME_MAX: self.ping.data["max"], ATTR_ROUND_TRIP_TIME_MDEV: self.ping.data["mdev"], ATTR_ROUND_TRIP_TIME_MIN: self.ping.data["min"], } def update(self): """Get the latest data.""" self.ping.update() class PingData: """The Class for handling the data retrieval.""" def __init__(self, host, count): """Initialize the data object.""" self._ip_address = host self._count = count self.data = {} self.available = False if sys.platform == "win32": self._ping_cmd = [ "ping", "-n", str(self._count), "-w", "1000", self._ip_address, ] else: self._ping_cmd = [ "ping", "-n", "-q", "-c", str(self._count), "-W1", self._ip_address, ] def ping(self): """Send ICMP echo request and return details if success.""" pinger = subprocess.Popen( self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) try: out = pinger.communicate() _LOGGER.debug("Output is %s", str(out)) if sys.platform == "win32": match = WIN32_PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} if "max/" not in str(out): match = PING_MATCHER_BUSYBOX.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} match = PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, 
rtt_avg, rtt_max, rtt_mdev = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": rtt_mdev} except (subprocess.CalledProcessError, AttributeError): return False def update(self): """Retrieve the latest details from the host.""" self.data = self.ping() self.available = bool(self.data)
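# A minimal sketch (not part of the integration) showing what the matcher
# above extracts from an iputils-style summary line. The sample line is
# illustrative; real ping output varies per platform and implementation.
import re

PING_RTT_MATCHER = re.compile(
    r"(?P<min>\d+\.\d+)/(?P<avg>\d+\.\d+)/(?P<max>\d+\.\d+)/(?P<mdev>\d+\.\d+)"
)

summary = "rtt min/avg/max/mdev = 0.043/0.055/0.073/0.012 ms"
match = PING_RTT_MATCHER.search(summary)
if match:
    rtt_min, rtt_avg, rtt_max, rtt_mdev = match.groups()
    print(rtt_min, rtt_avg, rtt_max, rtt_mdev)  # 0.043 0.055 0.073 0.012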
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
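# The fixture expectations asserted above can be reproduced with the
# standard-library statistics module alone; this mirrors what setup_method
# precomputes from self.values.
import statistics

values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6]
print(round(sum(values) / len(values), 2))    # mean: 10.77
print(round(statistics.median(values), 2))    # median: 9.2
print(round(statistics.stdev(values), 2))     # sample standard deviation
print(round(statistics.variance(values), 2))  # sample variance
print(round(values[-1] - values[0], 2))       # change: -11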
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/ping/binary_sensor.py
"""Support for Volvo heater.""" import logging from homeassistant.helpers.entity import ToggleEntity from . import DATA_KEY, VolvoEntity _LOGGER = logging.getLogger(__name__) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up a Volvo switch.""" if discovery_info is None: return async_add_entities([VolvoSwitch(hass.data[DATA_KEY], *discovery_info)]) class VolvoSwitch(VolvoEntity, ToggleEntity): """Representation of a Volvo switch.""" @property def is_on(self): """Return true if switch is on.""" return self.instrument.state async def async_turn_on(self, **kwargs): """Turn the switch on.""" await self.instrument.turn_on() self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the switch off.""" await self.instrument.turn_off() self.async_write_ha_state()
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/volvooncall/switch.py
"""Support for MySensors lights.""" from homeassistant.components import mysensors from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import callback import homeassistant.util.color as color_util from homeassistant.util.color import rgb_hex_to_rgb_list SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for lights.""" device_class_map = { "S_DIMMER": MySensorsLightDimmer, "S_RGB_LIGHT": MySensorsLightRGB, "S_RGBW_LIGHT": MySensorsLightRGBW, } mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, device_class_map, async_add_entities=async_add_entities, ) class MySensorsLight(mysensors.device.MySensorsEntity, LightEntity): """Representation of a MySensors Light child node.""" def __init__(self, *args): """Initialize a MySensors Light.""" super().__init__(*args) self._state = None self._brightness = None self._hs = None self._white = None @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def hs_color(self): """Return the hs color value [int, int].""" return self._hs @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def assumed_state(self): """Return true if unable to access real state of entity.""" return self.gateway.optimistic @property def is_on(self): """Return true if device is on.""" return self._state def _turn_on_light(self): """Turn on light child device.""" set_req = self.gateway.const.SetReq if self._state: return self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_LIGHT, 1, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = True self._values[set_req.V_LIGHT] = STATE_ON def _turn_on_dimmer(self, **kwargs): """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq brightness = self._brightness if ( ATTR_BRIGHTNESS not in kwargs or kwargs[ATTR_BRIGHTNESS] == self._brightness or set_req.V_DIMMER not in self._values ): return brightness = kwargs[ATTR_BRIGHTNESS] percent = round(100 * brightness / 255) self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, percent, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._brightness = brightness self._values[set_req.V_DIMMER] = percent def _turn_on_rgb_and_w(self, hex_template, **kwargs): """Turn on RGB or RGBW child device.""" rgb = list(color_util.color_hs_to_RGB(*self._hs)) white = self._white hex_color = self._values.get(self.value_type) hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None: new_rgb = color_util.color_hs_to_RGB(*hs_color) else: new_rgb = None new_white = kwargs.get(ATTR_WHITE_VALUE) if new_rgb is None and new_white is None: return if new_rgb is not None: rgb = list(new_rgb) if hex_template == "%02x%02x%02x%02x": if new_white is not None: rgb.append(new_white) else: rgb.append(white) hex_color = hex_template % tuple(rgb) if len(rgb) > 3: white = rgb.pop() self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._hs = color_util.color_RGB_to_hs(*rgb) self._white = white 
self._values[self.value_type] = hex_color async def async_turn_off(self, **kwargs): """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value(self.node_id, self.child_id, value_type, 0, ack=1) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = False self._values[value_type] = STATE_OFF self.async_write_ha_state() @callback def _async_update_light(self): """Update the controller with values from light child.""" value_type = self.gateway.const.SetReq.V_LIGHT self._state = self._values[value_type] == STATE_ON @callback def _async_update_dimmer(self): """Update the controller with values from dimmer child.""" value_type = self.gateway.const.SetReq.V_DIMMER if value_type in self._values: self._brightness = round(255 * int(self._values[value_type]) / 100) if self._brightness == 0: self._state = False @callback def _async_update_rgb_or_w(self): """Update the controller with values from RGB or RGBW child.""" value = self._values[self.value_type] color_list = rgb_hex_to_rgb_list(value) if len(color_list) > 3: self._white = color_list.pop() self._hs = color_util.color_RGB_to_hs(*color_list) class MySensorsLightDimmer(MySensorsLight): """Dimmer child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() class MySensorsLightRGB(MySensorsLight): """RGB child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_COLOR return SUPPORT_COLOR async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() self._async_update_rgb_or_w() class MySensorsLightRGBW(MySensorsLightRGB): """RGBW child class to MySensorsLightRGB.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW return SUPPORT_MYSENSORS_RGBW async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state()
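# A small sketch of the hex round trip used above: six hex digits for RGB,
# eight when the white channel is appended, decoded with the same helper
# this platform imports.
from homeassistant.util.color import rgb_hex_to_rgb_list

rgb = [255, 128, 0]
print("%02x%02x%02x" % tuple(rgb))  # "ff8000" (RGB template)

rgbw = [255, 128, 0, 64]
hex_rgbw = "%02x%02x%02x%02x" % tuple(rgbw)
print(hex_rgbw)  # "ff800040" (RGBW template)

# Decoding mirrors _async_update_rgb_or_w: a fourth component is white.
color_list = rgb_hex_to_rgb_list(hex_rgbw)
white = color_list.pop() if len(color_list) > 3 else None
print(color_list, white)  # [255, 128, 0] 64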
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/mysensors/light.py
"""An abstract class common to all Bond entities.""" from abc import abstractmethod from asyncio import TimeoutError as AsyncIOTimeoutError import logging from typing import Any, Dict, Optional from aiohttp import ClientError from homeassistant.const import ATTR_NAME from homeassistant.helpers.entity import Entity from .const import DOMAIN from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) class BondEntity(Entity): """Generic Bond entity encapsulating common features of any Bond controlled device.""" def __init__(self, hub: BondHub, device: BondDevice): """Initialize entity with API and device info.""" self._hub = hub self._device = device self._available = True @property def unique_id(self) -> Optional[str]: """Get unique ID for the entity.""" return self._device.device_id @property def name(self) -> Optional[str]: """Get entity name.""" return self._device.name @property def device_info(self) -> Optional[Dict[str, Any]]: """Get a an HA device representing this Bond controlled device.""" return { ATTR_NAME: self.name, "identifiers": {(DOMAIN, self._device.device_id)}, "via_device": (DOMAIN, self._hub.bond_id), } @property def assumed_state(self) -> bool: """Let HA know this entity relies on an assumed state tracked by Bond.""" return True @property def available(self) -> bool: """Report availability of this entity based on last API call results.""" return self._available async def async_update(self): """Fetch assumed state of the cover from the hub using API.""" try: state: dict = await self._hub.bond.device_state(self._device.device_id) except (ClientError, AsyncIOTimeoutError, OSError) as error: if self._available: _LOGGER.warning( "Entity %s has become unavailable", self.entity_id, exc_info=error ) self._available = False else: if not self._available: _LOGGER.info("Entity %s has come back", self.entity_id) self._available = True self._apply_state(state) @abstractmethod def _apply_state(self, state: dict): raise NotImplementedError
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/bond/entity.py
"""Support for MyChevy.""" from datetime import timedelta import logging import threading import time import mychevy.mychevy as mc import voluptuous as vol from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import config_validation as cv, discovery from homeassistant.util import Throttle DOMAIN = "mychevy" UPDATE_TOPIC = DOMAIN ERROR_TOPIC = f"{DOMAIN}_error" MYCHEVY_SUCCESS = "success" MYCHEVY_ERROR = "error" NOTIFICATION_ID = "mychevy_website_notification" NOTIFICATION_TITLE = "MyChevy website status" _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30) ERROR_SLEEP_TIME = timedelta(minutes=30) CONF_COUNTRY = "country" DEFAULT_COUNTRY = "us" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_COUNTRY, default=DEFAULT_COUNTRY): vol.All( cv.string, vol.In(["us", "ca"]) ), } ) }, extra=vol.ALLOW_EXTRA, ) class EVSensorConfig: """The EV sensor configuration.""" def __init__( self, name, attr, unit_of_measurement=None, icon=None, extra_attrs=None ): """Create new sensor configuration.""" self.name = name self.attr = attr self.extra_attrs = extra_attrs or [] self.unit_of_measurement = unit_of_measurement self.icon = icon class EVBinarySensorConfig: """The EV binary sensor configuration.""" def __init__(self, name, attr, device_class=None): """Create new binary sensor configuration.""" self.name = name self.attr = attr self.device_class = device_class def setup(hass, base_config): """Set up the mychevy component.""" config = base_config.get(DOMAIN) email = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) country = config.get(CONF_COUNTRY) hass.data[DOMAIN] = MyChevyHub( mc.MyChevy(email, password, country), hass, base_config ) hass.data[DOMAIN].start() return True class MyChevyHub(threading.Thread): """MyChevy Hub. Connecting to the mychevy website is done through a selenium webscraping process. That can only run synchronously. In order to prevent blocking of other parts of Home Assistant the architecture launches a polling loop in a thread. When new data is received, sensors are updated, and hass is signaled that there are updates. Sensors are not created until the first update, which will be 60 - 120 seconds after the platform starts. """ def __init__(self, client, hass, hass_config): """Initialize MyChevy Hub.""" super().__init__() self._client = client self.hass = hass self.hass_config = hass_config self.cars = [] self.status = None self.ready = False @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Update sensors from mychevy website. 
This is a synchronous polling call that takes a very long time (on the order of 2 to 3 minutes) """ self._client.login() self._client.get_cars() self.cars = self._client.cars if self.ready is not True: discovery.load_platform(self.hass, "sensor", DOMAIN, {}, self.hass_config) discovery.load_platform( self.hass, "binary_sensor", DOMAIN, {}, self.hass_config ) self.ready = True self.cars = self._client.update_cars() def get_car(self, vid): """Compatibility to work with one car.""" if self.cars: for car in self.cars: if car.vid == vid: return car return None def run(self): """Thread run loop.""" # We add the status device first outside of the loop # And then busy wait on threads while True: try: _LOGGER.info("Starting mychevy loop") self.update() self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC) time.sleep(MIN_TIME_BETWEEN_UPDATES.seconds) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error updating mychevy data. " "This probably means the OnStar link is down again" ) self.hass.helpers.dispatcher.dispatcher_send(ERROR_TOPIC) time.sleep(ERROR_SLEEP_TIME.seconds)
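# A minimal, generic sketch of the poll-loop-in-a-thread pattern that
# MyChevyHub.run() above implements: do the slow, blocking fetch, notify
# listeners on success, back off and notify on failure. The class name and
# callbacks here are illustrative assumptions, not the integration's API.
import logging
import threading
import time

_LOGGER = logging.getLogger(__name__)


class PollingHub(threading.Thread):
    """Run a blocking fetch in a loop without blocking the caller."""

    def __init__(self, fetch, notify_ok, notify_err, interval=1800, error_sleep=1800):
        super().__init__(daemon=True)
        self._fetch = fetch          # slow, synchronous callable
        self._notify_ok = notify_ok  # e.g. dispatcher_send(UPDATE_TOPIC)
        self._notify_err = notify_err
        self._interval = interval
        self._error_sleep = error_sleep

    def run(self):
        while True:
            try:
                self._fetch()
                self._notify_ok()
                time.sleep(self._interval)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception("Polling failed; sleeping before retry")
                self._notify_err()
                time.sleep(self._error_sleep)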
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/mychevy/__init__.py
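# A quick standalone check of the reference figures that setup_method above
# derives from the shared value list, using only the standard library (the
# test rounds everything to two decimals in the same way).
import statistics

values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6]
assert round(sum(values) / len(values), 2) == 10.77  # mean
assert round(statistics.median(values), 2) == 9.2    # median
assert round(statistics.stdev(values), 2) == 5.89    # standard deviation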
"""The BleBox devices integration.""" import asyncio import logging from blebox_uniapi.error import Error from blebox_uniapi.products import Products from blebox_uniapi.session import ApiHost from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import Entity from .const import DEFAULT_SETUP_TIMEOUT, DOMAIN, PRODUCT _LOGGER = logging.getLogger(__name__) PLATFORMS = ["cover", "sensor", "switch", "air_quality", "light", "climate"] PARALLEL_UPDATES = 0 async def async_setup(hass: HomeAssistant, config: dict): """Set up the BleBox devices component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up BleBox devices from a config entry.""" websession = async_get_clientsession(hass) host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] timeout = DEFAULT_SETUP_TIMEOUT api_host = ApiHost(host, port, timeout, websession, hass.loop) try: product = await Products.async_from_host(api_host) except Error as ex: _LOGGER.error("Identify failed at %s:%d (%s)", api_host.host, api_host.port, ex) raise ConfigEntryNotReady from ex domain = hass.data.setdefault(DOMAIN, {}) domain_entry = domain.setdefault(entry.entry_id, {}) product = domain_entry.setdefault(PRODUCT, product) for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok @callback def create_blebox_entities( hass, config_entry, async_add_entities, entity_klass, entity_type ): """Create entities from a BleBox product's features.""" product = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [] if entity_type in product.features: for feature in product.features[entity_type]: entities.append(entity_klass(feature)) async_add_entities(entities, True) class BleBoxEntity(Entity): """Implements a common class for entities representing a BleBox feature.""" def __init__(self, feature): """Initialize a BleBox entity.""" self._feature = feature @property def name(self): """Return the internal entity name.""" return self._feature.full_name @property def unique_id(self): """Return a unique id.""" return self._feature.unique_id async def async_update(self): """Update the entity state.""" try: await self._feature.async_update() except Error as ex: _LOGGER.error("Updating '%s' failed: %s", self.name, ex) @property def device_info(self): """Return device information for this entity.""" product = self._feature.product return { "identifiers": {(DOMAIN, product.unique_id)}, "name": product.name, "manufacturer": product.brand, "model": product.model, "sw_version": product.firmware_version, }
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/blebox/__init__.py
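# A standalone sketch of the age-based purge that the tests above disable via
# patch.object(StatisticsSensor, "_purge_old", ...): samples older than
# max_age relative to "now" are dropped from the front of the window. The
# deque names here are generic, not the component's actual attributes.
from collections import deque
from datetime import datetime, timedelta, timezone


def purge_old(ages, states, now, max_age):
    """Drop (age, state) pairs older than max_age, oldest first."""
    while ages and (now - ages[0]) > max_age:
        ages.popleft()
        states.popleft()


now = datetime(2021, 8, 2, 12, 26, tzinfo=timezone.utc)
ages = deque(now - timedelta(minutes=m) for m in (5, 4, 2, 1))
states = deque([17, 20, 15.2, 5])
purge_old(ages, states, now, timedelta(minutes=3))
assert list(states) == [15.2, 5]  # only samples newer than max_age remain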
"""Support for Z-Wave.""" # pylint: disable=import-outside-toplevel import asyncio import copy from importlib import import_module import logging from pprint import pprint import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import CoreState, callback from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import ( async_get_registry as async_get_device_registry, ) from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.entity_registry import ( async_get_registry as async_get_entity_registry, ) from homeassistant.helpers.entity_values import EntityValues from homeassistant.helpers.event import async_track_time_change from homeassistant.util import convert import homeassistant.util.dt as dt_util from . import config_flow # noqa: F401 pylint: disable=unused-import from . import const, websocket_api as wsapi, workaround from .const import ( CONF_AUTOHEAL, CONF_CONFIG_PATH, CONF_DEBUG, CONF_NETWORK_KEY, CONF_POLLING_INTERVAL, CONF_USB_STICK_PATH, DATA_DEVICES, DATA_ENTITY_VALUES, DATA_NETWORK, DATA_ZWAVE_CONFIG, DEFAULT_CONF_AUTOHEAL, DEFAULT_CONF_USB_STICK_PATH, DEFAULT_DEBUG, DEFAULT_POLLING_INTERVAL, DOMAIN, ) from .discovery_schemas import DISCOVERY_SCHEMAS from .node_entity import ZWaveBaseEntity, ZWaveNodeEntity from .util import ( check_has_unique_id, check_node_schema, check_value_schema, is_node_parsed, node_device_id_and_name, node_name, ) _LOGGER = logging.getLogger(__name__) CLASS_ID = "class_id" ATTR_POWER = "power_consumption" CONF_POLLING_INTENSITY = "polling_intensity" CONF_IGNORED = "ignored" CONF_INVERT_OPENCLOSE_BUTTONS = "invert_openclose_buttons" CONF_INVERT_PERCENT = "invert_percent" CONF_REFRESH_VALUE = "refresh_value" CONF_REFRESH_DELAY = "delay" CONF_DEVICE_CONFIG = "device_config" CONF_DEVICE_CONFIG_GLOB = "device_config_glob" CONF_DEVICE_CONFIG_DOMAIN = "device_config_domain" DEFAULT_CONF_IGNORED = False DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS = False DEFAULT_CONF_INVERT_PERCENT = False DEFAULT_CONF_REFRESH_VALUE = False DEFAULT_CONF_REFRESH_DELAY = 5 SUPPORTED_PLATFORMS = [ "binary_sensor", "climate", "cover", "fan", "lock", "light", "sensor", "switch", ] RENAME_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) RENAME_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) SET_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(vol.Coerce(int), cv.string), vol.Optional(const.ATTR_CONFIG_SIZE, default=2): vol.Coerce(int), } ) SET_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Any(vol.Coerce(int), cv.string), vol.Required(const.ATTR_CONFIG_VALUE): 
vol.Any(vol.Coerce(int), cv.string), } ) REFRESH_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), } ) SET_POLL_INTENSITY_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_POLL_INTENSITY): vol.Coerce(int), } ) PRINT_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), } ) NODE_SERVICE_SCHEMA = vol.Schema({vol.Required(const.ATTR_NODE_ID): vol.Coerce(int)}) REFRESH_ENTITY_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) RESET_NODE_METERS_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=1): vol.Coerce(int), } ) CHANGE_ASSOCIATION_SCHEMA = vol.Schema( { vol.Required(const.ATTR_ASSOCIATION): cv.string, vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_TARGET_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_GROUP): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=0x00): vol.Coerce(int), } ) SET_WAKEUP_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.All( vol.Coerce(int), cv.positive_int ), } ) HEAL_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_RETURN_ROUTES, default=False): cv.boolean, } ) TEST_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_MESSAGES, default=1): cv.positive_int, } ) DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema( { vol.Optional(CONF_POLLING_INTENSITY): cv.positive_int, vol.Optional(CONF_IGNORED, default=DEFAULT_CONF_IGNORED): cv.boolean, vol.Optional( CONF_INVERT_OPENCLOSE_BUTTONS, default=DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS ): cv.boolean, vol.Optional( CONF_INVERT_PERCENT, default=DEFAULT_CONF_INVERT_PERCENT ): cv.boolean, vol.Optional( CONF_REFRESH_VALUE, default=DEFAULT_CONF_REFRESH_VALUE ): cv.boolean, vol.Optional( CONF_REFRESH_DELAY, default=DEFAULT_CONF_REFRESH_DELAY ): cv.positive_int, } ) SIGNAL_REFRESH_ENTITY_FORMAT = "zwave_refresh_entity_{}" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_AUTOHEAL, default=DEFAULT_CONF_AUTOHEAL): cv.boolean, vol.Optional(CONF_CONFIG_PATH): cv.string, vol.Optional(CONF_NETWORK_KEY): vol.All( cv.string, vol.Match(r"(0x\w\w,\s?){15}0x\w\w") ), vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema( {cv.entity_id: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_GLOB, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_DOMAIN, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean, vol.Optional( CONF_POLLING_INTERVAL, default=DEFAULT_POLLING_INTERVAL ): cv.positive_int, vol.Optional(CONF_USB_STICK_PATH): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) def _obj_to_dict(obj): """Convert an object into a hash for debug.""" return { key: getattr(obj, key) for key in dir(obj) if key[0] != "_" and not callable(getattr(obj, key)) } def _value_name(value): """Return the name of the value.""" return f"{node_name(value.node)} {value.label}".strip() def nice_print_node(node): """Print a nice formatted node to the output (debug method).""" node_dict = _obj_to_dict(node) node_dict["values"] = { value_id: _obj_to_dict(value) for 
value_id, value in node.values.items() } _LOGGER.info("FOUND NODE %s \n%s", node.product_name, node_dict) def get_config_value(node, value_index, tries=5): """Return the current configuration value for a specific index.""" try: for value in node.values.values(): if ( value.command_class == const.COMMAND_CLASS_CONFIGURATION and value.index == value_index ): return value.data except RuntimeError: # If we get a runtime error the dict has changed while # we were looking for a value, just do it again return ( None if tries <= 0 else get_config_value(node, value_index, tries=tries - 1) ) return None async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Z-Wave platform (generic part).""" if discovery_info is None or DATA_NETWORK not in hass.data: return False device = hass.data[DATA_DEVICES].get(discovery_info[const.DISCOVERY_DEVICE]) if device is None: return False async_add_entities([device]) return True async def async_setup(hass, config): """Set up Z-Wave components.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_ZWAVE_CONFIG] = conf if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={ CONF_USB_STICK_PATH: conf.get( CONF_USB_STICK_PATH, DEFAULT_CONF_USB_STICK_PATH ), CONF_NETWORK_KEY: conf.get(CONF_NETWORK_KEY), }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up Z-Wave from a config entry. Will automatically load components to support devices found on the network. """ from pydispatch import dispatcher # pylint: disable=import-error from openzwave.option import ZWaveOption from openzwave.network import ZWaveNetwork from openzwave.group import ZWaveGroup # Merge config entry and yaml config config = config_entry.data if DATA_ZWAVE_CONFIG in hass.data: config = {**config, **hass.data[DATA_ZWAVE_CONFIG]} # Update hass.data with merged config so we can access it elsewhere hass.data[DATA_ZWAVE_CONFIG] = config # Load configuration use_debug = config.get(CONF_DEBUG, DEFAULT_DEBUG) autoheal = config.get(CONF_AUTOHEAL, DEFAULT_CONF_AUTOHEAL) device_config = EntityValues( config.get(CONF_DEVICE_CONFIG), config.get(CONF_DEVICE_CONFIG_DOMAIN), config.get(CONF_DEVICE_CONFIG_GLOB), ) usb_path = config[CONF_USB_STICK_PATH] _LOGGER.info("Z-Wave USB path is %s", usb_path) # Setup options options = ZWaveOption( usb_path, user_path=hass.config.config_dir, config_path=config.get(CONF_CONFIG_PATH), ) options.set_console_output(use_debug) if config.get(CONF_NETWORK_KEY): options.addOption("NetworkKey", config[CONF_NETWORK_KEY]) await hass.async_add_executor_job(options.lock) network = hass.data[DATA_NETWORK] = ZWaveNetwork(options, autostart=False) hass.data[DATA_DEVICES] = {} hass.data[DATA_ENTITY_VALUES] = [] registry = await async_get_entity_registry(hass) wsapi.async_load_websocket_api(hass) if use_debug: # pragma: no cover def log_all(signal, value=None): """Log all the signals.""" print("") print("SIGNAL *****", signal) if value and signal in ( ZWaveNetwork.SIGNAL_VALUE_CHANGED, ZWaveNetwork.SIGNAL_VALUE_ADDED, ZWaveNetwork.SIGNAL_SCENE_EVENT, ZWaveNetwork.SIGNAL_NODE_EVENT, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, ): pprint(_obj_to_dict(value)) print("") dispatcher.connect(log_all, weak=False) def value_added(node, value): """Handle a newly added value to a node on the network.""" # Check if this value
should be tracked by an existing entity for values in hass.data[DATA_ENTITY_VALUES]: values.check_value(value) for schema in DISCOVERY_SCHEMAS: if not check_node_schema(node, schema): continue if not check_value_schema( value, schema[const.DISC_VALUES][const.DISC_PRIMARY] ): continue values = ZWaveDeviceEntityValues( hass, schema, value, config, device_config, registry ) # We create a new list and update the reference here so that # the list can be safely iterated over in the main thread new_values = hass.data[DATA_ENTITY_VALUES] + [values] hass.data[DATA_ENTITY_VALUES] = new_values platform = EntityPlatform( hass=hass, logger=_LOGGER, domain=DOMAIN, platform_name=DOMAIN, platform=None, scan_interval=DEFAULT_SCAN_INTERVAL, entity_namespace=None, ) platform.config_entry = config_entry def node_added(node): """Handle a new node on the network.""" entity = ZWaveNodeEntity(node, network) async def _add_node_to_component(): if hass.data[DATA_DEVICES].get(entity.unique_id): return name = node_name(node) generated_id = generate_entity_id(DOMAIN + ".{}", name, []) node_config = device_config.get(generated_id) if node_config.get(CONF_IGNORED): _LOGGER.info( "Ignoring node entity %s due to device settings", generated_id ) return hass.data[DATA_DEVICES][entity.unique_id] = entity await platform.async_add_entities([entity]) if entity.unique_id: hass.async_add_job(_add_node_to_component()) return @callback def _on_ready(sec): _LOGGER.info("Z-Wave node %d ready after %d seconds", entity.node_id, sec) hass.async_add_job(_add_node_to_component) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave node %d not ready after %d seconds, continuing anyway", entity.node_id, sec, ) hass.async_add_job(_add_node_to_component) hass.add_job(check_has_unique_id, entity, _on_ready, _on_timeout) def node_removed(node): node_id = node.node_id node_key = f"node-{node_id}" for key in list(hass.data[DATA_DEVICES]): if key is None: continue if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] _LOGGER.debug( "Removing Entity - value: %s - entity_id: %s", key, entity.entity_id ) hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][key] entity = hass.data[DATA_DEVICES][node_key] hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][node_key] hass.add_job(_remove_device(node)) async def _remove_device(node): dev_reg = await async_get_device_registry(hass) identifier, name = node_device_id_and_name(node) device = dev_reg.async_get_device(identifiers={identifier}, connections=set()) if device is not None: _LOGGER.debug("Removing Device - %s - %s", device.id, name) dev_reg.async_remove_device(device.id) def network_ready(): """Handle the query of all awake nodes.""" _LOGGER.info( "Z-Wave network is ready for use. All awake nodes " "have been queried. Sleeping nodes will be " "queried when they awake" ) hass.bus.fire(const.EVENT_NETWORK_READY) def network_complete(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. All nodes on the network have been queried" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE) def network_complete_some_dead(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. 
All nodes on the network " "have been queried, but some nodes are marked dead" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE_SOME_DEAD) dispatcher.connect(value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED, weak=False) dispatcher.connect(node_added, ZWaveNetwork.SIGNAL_NODE_ADDED, weak=False) dispatcher.connect(node_removed, ZWaveNetwork.SIGNAL_NODE_REMOVED, weak=False) dispatcher.connect( network_ready, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete_some_dead, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, weak=False, ) def add_node(service): """Switch into inclusion mode.""" _LOGGER.info("Z-Wave add_node have been initialized") network.controller.add_node() def add_node_secure(service): """Switch into secure inclusion mode.""" _LOGGER.info("Z-Wave add_node_secure have been initialized") network.controller.add_node(True) def remove_node(service): """Switch into exclusion mode.""" _LOGGER.info("Z-Wave remove_node have been initialized") network.controller.remove_node() def cancel_command(service): """Cancel a running controller command.""" _LOGGER.info("Cancel running Z-Wave command") network.controller.cancel_command() def heal_network(service): """Heal the network.""" _LOGGER.info("Z-Wave heal running") network.heal() def soft_reset(service): """Soft reset the controller.""" _LOGGER.info("Z-Wave soft_reset have been initialized") network.controller.soft_reset() def test_network(service): """Test the network by sending commands to all the nodes.""" _LOGGER.info("Z-Wave test_network have been initialized") network.test() def stop_network(_service_or_event): """Stop Z-Wave network.""" _LOGGER.info("Stopping Z-Wave network") network.stop() if hass.state == CoreState.running: hass.bus.fire(const.EVENT_NETWORK_STOP) async def rename_node(service): """Rename a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] name = service.data.get(const.ATTR_NAME) node.name = name _LOGGER.info("Renamed Z-Wave node %d to %s", node_id, name) update_ids = service.data.get(const.ATTR_UPDATE_IDS) # We want to rename the device, the node entity, # and all the contained entities node_key = f"node-{node_id}" entity = hass.data[DATA_DEVICES][node_key] await entity.node_renamed(update_ids) for key in list(hass.data[DATA_DEVICES]): if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] await entity.value_renamed(update_ids) async def rename_value(service): """Rename a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] name = service.data.get(const.ATTR_NAME) value.label = name _LOGGER.info( "Renamed Z-Wave value (Node %d Value %d) to %s", node_id, value_id, name ) update_ids = service.data.get(const.ATTR_UPDATE_IDS) value_key = f"{node_id}-{value_id}" entity = hass.data[DATA_DEVICES][value_key] await entity.value_renamed(update_ids) def set_poll_intensity(service): """Set the polling intensity of a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] intensity = service.data.get(const.ATTR_POLL_INTENSITY) if intensity == 0: if value.disable_poll(): _LOGGER.info("Polling disabled (Node %d Value %d)", node_id, value_id) return _LOGGER.info( "Polling disabled failed (Node %d Value %d)", node_id, 
value_id ) else: if value.enable_poll(intensity): _LOGGER.info( "Set polling intensity (Node %d Value %d) to %s", node_id, value_id, intensity, ) return _LOGGER.info( "Set polling intensity failed (Node %d Value %d)", node_id, value_id ) def remove_failed_node(service): """Remove failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to remove zwave node %d", node_id) network.controller.remove_failed_node(node_id) def replace_failed_node(service): """Replace failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to replace zwave node %d", node_id) network.controller.replace_failed_node(node_id) def set_config_parameter(service): """Set a config parameter to a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) selection = service.data.get(const.ATTR_CONFIG_VALUE) size = service.data.get(const.ATTR_CONFIG_SIZE) for value in node.get_values( class_id=const.COMMAND_CLASS_CONFIGURATION ).values(): if value.index != param: continue if value.type == const.TYPE_BOOL: value.data = int(selection == "True") _LOGGER.info( "Setting configuration parameter %s on Node %s with bool selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_LIST: value.data = str(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with list selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_BUTTON: network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Setting configuration parameter %s on Node %s " "with button selection %s", param, node_id, selection, ) return value.data = int(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) return node.set_config_param(param, selection, size) _LOGGER.info( "Setting unknown configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) def refresh_node_value(service): """Refresh the specified value from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] node.values[value_id].refresh() _LOGGER.info("Node %s value %s refreshed", node_id, value_id) def set_node_value(service): """Set the specified value on a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) value = service.data.get(const.ATTR_CONFIG_VALUE) node = network.nodes[node_id] node.values[value_id].data = value _LOGGER.info("Node %s value %s set to %s", node_id, value_id, value) def print_config_parameter(service): """Print a config parameter from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) _LOGGER.info( "Config parameter %s on Node %s: %s", param, node_id, get_config_value(node, param), ) def print_node(service): """Print all information about z-wave node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] nice_print_node(node) def set_wakeup(service): """Set wake-up interval of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] value = service.data.get(const.ATTR_CONFIG_VALUE) if node.can_wake_up(): for value_id in node.get_values(class_id=const.COMMAND_CLASS_WAKE_UP): node.values[value_id].data = value _LOGGER.info("Node %s wake-up set to %d", node_id, value) else: 
_LOGGER.info("Node %s is not wakeable", node_id) def change_association(service): """Change an association in the zwave network.""" association_type = service.data.get(const.ATTR_ASSOCIATION) node_id = service.data.get(const.ATTR_NODE_ID) target_node_id = service.data.get(const.ATTR_TARGET_NODE_ID) group = service.data.get(const.ATTR_GROUP) instance = service.data.get(const.ATTR_INSTANCE) node = ZWaveGroup(group, network, node_id) if association_type == "add": node.add_association(target_node_id, instance) _LOGGER.info( "Adding association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) if association_type == "remove": node.remove_association(target_node_id, instance) _LOGGER.info( "Removing association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) async def async_refresh_entity(service): """Refresh values that specific entity depends on.""" entity_id = service.data.get(ATTR_ENTITY_ID) async_dispatcher_send(hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(entity_id)) def refresh_node(service): """Refresh all node info.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] node.refresh_info() def reset_node_meters(service): """Reset meter counters of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) instance = service.data.get(const.ATTR_INSTANCE) node = network.nodes[node_id] for value in node.get_values(class_id=const.COMMAND_CLASS_METER).values(): if value.index != const.INDEX_METER_RESET: continue if value.instance != instance: continue network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Resetting meters on node %s instance %s....", node_id, instance ) return _LOGGER.info( "Node %s on instance %s does not have resettable meters", node_id, instance ) def heal_node(service): """Heal a node on the network.""" node_id = service.data.get(const.ATTR_NODE_ID) update_return_routes = service.data.get(const.ATTR_RETURN_ROUTES) node = network.nodes[node_id] _LOGGER.info("Z-Wave node heal running for node %s", node_id) node.heal(update_return_routes) def test_node(service): """Send test messages to a node on the network.""" node_id = service.data.get(const.ATTR_NODE_ID) messages = service.data.get(const.ATTR_MESSAGES) node = network.nodes[node_id] _LOGGER.info("Sending %s test-messages to node %s", messages, node_id) node.test(messages) def start_zwave(_service_or_event): """Startup Z-Wave network.""" _LOGGER.info("Starting Z-Wave network...") network.start() hass.bus.fire(const.EVENT_NETWORK_START) async def _check_awaked(): """Wait for Z-wave awaked state (or timeout) and finalize start.""" _LOGGER.debug("network state: %d %s", network.state, network.state_str) start_time = dt_util.utcnow() while True: waited = int((dt_util.utcnow() - start_time).total_seconds()) if network.state >= network.STATE_AWAKED: # Need to be in STATE_AWAKED before talking to nodes. _LOGGER.info("Z-Wave ready after %d seconds", waited) break if waited >= const.NETWORK_READY_WAIT_SECS: # Wait up to NETWORK_READY_WAIT_SECS seconds for the Z-Wave # network to be ready. 
_LOGGER.warning( "Z-Wave not ready after %d seconds, continuing anyway", waited ) _LOGGER.info( "final network state: %d %s", network.state, network.state_str ) break await asyncio.sleep(1) hass.async_add_job(_finalize_start) hass.add_job(_check_awaked) def _finalize_start(): """Perform final initializations after Z-Wave network is awaked.""" polling_interval = convert(config.get(CONF_POLLING_INTERVAL), int) if polling_interval is not None: network.set_poll_interval(polling_interval, False) poll_interval = network.get_poll_interval() _LOGGER.info("Z-Wave polling interval set to %d ms", poll_interval) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_network) # Register node services for Z-Wave network hass.services.register(DOMAIN, const.SERVICE_ADD_NODE, add_node) hass.services.register(DOMAIN, const.SERVICE_ADD_NODE_SECURE, add_node_secure) hass.services.register(DOMAIN, const.SERVICE_REMOVE_NODE, remove_node) hass.services.register(DOMAIN, const.SERVICE_CANCEL_COMMAND, cancel_command) hass.services.register(DOMAIN, const.SERVICE_HEAL_NETWORK, heal_network) hass.services.register(DOMAIN, const.SERVICE_SOFT_RESET, soft_reset) hass.services.register(DOMAIN, const.SERVICE_TEST_NETWORK, test_network) hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK, stop_network) hass.services.register( DOMAIN, const.SERVICE_RENAME_NODE, rename_node, schema=RENAME_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RENAME_VALUE, rename_value, schema=RENAME_VALUE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_SET_CONFIG_PARAMETER, set_config_parameter, schema=SET_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_NODE_VALUE, set_node_value, schema=SET_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE_VALUE, refresh_node_value, schema=REFRESH_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_PRINT_CONFIG_PARAMETER, print_config_parameter, schema=PRINT_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REMOVE_FAILED_NODE, remove_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REPLACE_FAILED_NODE, replace_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_CHANGE_ASSOCIATION, change_association, schema=CHANGE_ASSOCIATION_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_WAKEUP, set_wakeup, schema=SET_WAKEUP_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_PRINT_NODE, print_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_ENTITY, async_refresh_entity, schema=REFRESH_ENTITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE, refresh_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RESET_NODE_METERS, reset_node_meters, schema=RESET_NODE_METERS_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_POLL_INTENSITY, set_poll_intensity, schema=SET_POLL_INTENSITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_HEAL_NODE, heal_node, schema=HEAL_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_TEST_NODE, test_node, schema=TEST_NODE_SCHEMA ) # Setup autoheal if autoheal: _LOGGER.info("Z-Wave network autoheal is enabled") async_track_time_change(hass, heal_network, hour=0, minute=0, second=0) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_zwave) hass.services.async_register(DOMAIN, const.SERVICE_START_NETWORK, start_zwave) for entry_component in SUPPORTED_PLATFORMS: 
hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, entry_component) ) return True class ZWaveDeviceEntityValues: """Manages entity access to the underlying zwave value objects.""" def __init__( self, hass, schema, primary_value, zwave_config, device_config, registry ): """Initialize the values object with the passed entity schema.""" self._hass = hass self._zwave_config = zwave_config self._device_config = device_config self._schema = copy.deepcopy(schema) self._values = {} self._entity = None self._workaround_ignore = False self._registry = registry for name in self._schema[const.DISC_VALUES].keys(): self._values[name] = None self._schema[const.DISC_VALUES][name][const.DISC_INSTANCE] = [ primary_value.instance ] self._values[const.DISC_PRIMARY] = primary_value self._node = primary_value.node self._schema[const.DISC_NODE_ID] = [self._node.node_id] # Check values that have already been discovered for node for value in self._node.values.values(): self.check_value(value) self._check_entity_ready() def __getattr__(self, name): """Get the specified value for this entity.""" return self._values[name] def __iter__(self): """Allow iteration over all values.""" return iter(self._values.values()) def check_value(self, value): """Check if the new value matches a missing value for this entity. If a match is found, it is added to the values mapping. """ if not check_node_schema(value.node, self._schema): return for name in self._values: if self._values[name] is not None: continue if not check_value_schema(value, self._schema[const.DISC_VALUES][name]): continue self._values[name] = value if self._entity: self._entity.value_added() self._entity.value_changed() self._check_entity_ready() def _check_entity_ready(self): """Check if all required values are discovered and create entity.""" if self._workaround_ignore: return if self._entity is not None: return for name in self._schema[const.DISC_VALUES]: if self._values[name] is None and not self._schema[const.DISC_VALUES][ name ].get(const.DISC_OPTIONAL): return component = self._schema[const.DISC_COMPONENT] workaround_component = workaround.get_device_component_mapping(self.primary) if workaround_component and workaround_component != component: if workaround_component == workaround.WORKAROUND_IGNORE: _LOGGER.info( "Ignoring Node %d Value %d due to workaround", self.primary.node.node_id, self.primary.value_id, ) # No entity will be created for this value self._workaround_ignore = True return _LOGGER.debug("Using %s instead of %s", workaround_component, component) component = workaround_component entity_id = self._registry.async_get_entity_id( component, DOMAIN, compute_value_unique_id(self._node, self.primary) ) if entity_id is None: value_name = _value_name(self.primary) entity_id = generate_entity_id(component + ".{}", value_name, []) node_config = self._device_config.get(entity_id) # Configure node _LOGGER.debug( "Adding Node_id=%s Generic_command_class=%s, " "Specific_command_class=%s, " "Command_class=%s, Value type=%s, " "Genre=%s as %s", self._node.node_id, self._node.generic, self._node.specific, self.primary.command_class, self.primary.type, self.primary.genre, component, ) if node_config.get(CONF_IGNORED): _LOGGER.info("Ignoring entity %s due to device settings", entity_id) # No entity will be created for this value self._workaround_ignore = True return polling_intensity = convert(node_config.get(CONF_POLLING_INTENSITY), int) if polling_intensity: self.primary.enable_poll(polling_intensity) platform = 
import_module(f".{component}", __name__) device = platform.get_device( node=self._node, values=self, node_config=node_config, hass=self._hass ) if device is None: # No entity will be created for this value self._workaround_ignore = True return self._entity = device @callback def _on_ready(sec): _LOGGER.info( "Z-Wave entity %s (node_id: %d) ready after %d seconds", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave entity %s (node_id: %d) not ready after %d seconds, " "continuing anyway", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) async def discover_device(component, device): """Put device in a dictionary and call discovery on it.""" if self._hass.data[DATA_DEVICES].get(device.unique_id): return self._hass.data[DATA_DEVICES][device.unique_id] = device if component in SUPPORTED_PLATFORMS: async_dispatcher_send(self._hass, f"zwave_new_{component}", device) else: await discovery.async_load_platform( self._hass, component, DOMAIN, {const.DISCOVERY_DEVICE: device.unique_id}, self._zwave_config, ) if device.unique_id: self._hass.add_job(discover_device, component, device) else: self._hass.add_job(check_has_unique_id, device, _on_ready, _on_timeout) class ZWaveDeviceEntity(ZWaveBaseEntity): """Representation of a Z-Wave node entity.""" def __init__(self, values, domain): """Initialize the z-Wave device.""" # pylint: disable=import-error super().__init__() from openzwave.network import ZWaveNetwork from pydispatch import dispatcher self.values = values self.node = values.primary.node self.values.primary.set_change_verified(False) self._name = _value_name(self.values.primary) self._unique_id = self._compute_unique_id() self._update_attributes() dispatcher.connect( self.network_value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED ) def network_value_changed(self, value): """Handle a value change on the network.""" if value.value_id in [v.value_id for v in self.values if v]: return self.value_changed() def value_added(self): """Handle a new value of this entity.""" def value_changed(self): """Handle a changed value for this entity's node.""" self._update_attributes() self.update_properties() self.maybe_schedule_update() async def value_renamed(self, update_ids=False): """Rename the node and update any IDs.""" self._name = _value_name(self.values.primary) if update_ids: # Update entity ID. ent_reg = await async_get_entity_registry(self.hass) new_entity_id = ent_reg.async_generate_entity_id( self.platform.domain, self._name, self.platform.entities.keys() - {self.entity_id}, ) if new_entity_id != self.entity_id: # Don't change the name attribute, it will be None unless # customised and if it's been customised, keep the # customisation. ent_reg.async_update_entity(self.entity_id, new_entity_id=new_entity_id) return # else for the above two ifs, update if not using update_entity self.async_write_ha_state() async def async_added_to_hass(self): """Add device to dict.""" async_dispatcher_connect( self.hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(self.entity_id), self.refresh_from_network, ) def _update_attributes(self): """Update the node attributes. 
May only be used inside callback.""" self.node_id = self.node.node_id self._name = _value_name(self.values.primary) if not self._unique_id: self._unique_id = self._compute_unique_id() if self._unique_id: self.try_remove_and_add() if self.values.power: self.power_consumption = round( self.values.power.data, self.values.power.precision ) else: self.power_consumption = None def update_properties(self): """Update on data changes for node values.""" @property def should_poll(self): """No polling needed.""" return False @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def device_info(self): """Return device information.""" identifier, name = node_device_id_and_name( self.node, self.values.primary.instance ) info = { "name": name, "identifiers": {identifier}, "manufacturer": self.node.manufacturer_name, "model": self.node.product_name, } if self.values.primary.instance > 1: info["via_device"] = (DOMAIN, self.node_id) elif self.node_id > 1: info["via_device"] = (DOMAIN, 1) return info @property def name(self): """Return the name of the device.""" return self._name @property def device_state_attributes(self): """Return the device specific state attributes.""" attrs = { const.ATTR_NODE_ID: self.node_id, const.ATTR_VALUE_INDEX: self.values.primary.index, const.ATTR_VALUE_INSTANCE: self.values.primary.instance, const.ATTR_VALUE_ID: str(self.values.primary.value_id), } if self.power_consumption is not None: attrs[ATTR_POWER] = self.power_consumption return attrs def refresh_from_network(self): """Refresh all dependent values from zwave network.""" for value in self.values: if value is not None: self.node.refresh_value(value.value_id) def _compute_unique_id(self): if ( is_node_parsed(self.node) and self.values.primary.label != "Unknown" ) or self.node.is_ready: return compute_value_unique_id(self.node, self.values.primary) return None def compute_value_unique_id(node, value): """Compute unique_id a value would get if it were to get one.""" return f"{node.node_id}-{value.object_id}"
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/zwave/__init__.py
"""The template component.""" from itertools import chain import logging from homeassistant.const import MATCH_ALL _LOGGER = logging.getLogger(__name__) def initialise_templates(hass, templates, attribute_templates=None): """Initialise templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} for template in chain(templates.values(), attribute_templates.values()): if template is None: continue template.hass = hass def extract_entities( device_name, device_type, manual_entity_ids, templates, attribute_templates=None ): """Extract entity ids from templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} entity_ids = set() if manual_entity_ids is None: invalid_templates = [] for template_name, template in chain( templates.items(), attribute_templates.items() ): if template is None: continue template_entity_ids = template.extract_entities() if template_entity_ids != MATCH_ALL: entity_ids |= set(template_entity_ids) else: invalid_templates.append(template_name.replace("_template", "")) entity_ids = list(entity_ids) if invalid_templates: if not entity_ids: entity_ids = MATCH_ALL _LOGGER.warning( "Template %s '%s' has no entity ids configured to track nor" " were we able to extract the entities to track from the %s " "template(s). This entity will only be able to be updated " "manually", device_type, device_name, ", ".join(invalid_templates), ) else: entity_ids = manual_entity_ids return entity_ids
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/template/__init__.py
"""Support for Niko Home Control.""" from datetime import timedelta import logging import nikohomecontrol import voluptuous as vol # Import the device class from the component that you want to support from homeassistant.components.light import ATTR_BRIGHTNESS, PLATFORM_SCHEMA, LightEntity from homeassistant.const import CONF_HOST from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) SCAN_INTERVAL = timedelta(seconds=30) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Niko Home Control light platform.""" host = config[CONF_HOST] try: nhc = nikohomecontrol.NikoHomeControl( {"ip": host, "port": 8000, "timeout": 20000} ) niko_data = NikoHomeControlData(hass, nhc) await niko_data.async_update() except OSError as err: _LOGGER.error("Unable to access %s (%s)", host, err) raise PlatformNotReady async_add_entities( [NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True ) class NikoHomeControlLight(LightEntity): """Representation of an Niko Light.""" def __init__(self, light, data): """Set up the Niko Home Control light platform.""" self._data = data self._light = light self._unique_id = f"light-{light.id}" self._name = light.name self._state = light.is_on self._brightness = None @property def unique_id(self): """Return unique ID for light.""" return self._unique_id @property def name(self): """Return the display name of this light.""" return self._name @property def brightness(self): """Return the brightness of the light.""" return self._brightness @property def is_on(self): """Return true if light is on.""" return self._state def turn_on(self, **kwargs): """Instruct the light to turn on.""" self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255) _LOGGER.debug("Turn on: %s", self.name) self._light.turn_on() def turn_off(self, **kwargs): """Instruct the light to turn off.""" _LOGGER.debug("Turn off: %s", self.name) self._light.turn_off() async def async_update(self): """Get the latest data from NikoHomeControl API.""" await self._data.async_update() self._state = self._data.get_state(self._light.id) class NikoHomeControlData: """The class for handling data retrieval.""" def __init__(self, hass, nhc): """Set up Niko Home Control Data object.""" self._nhc = nhc self.hass = hass self.available = True self.data = {} self._system_info = None @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the NikoHomeControl API.""" _LOGGER.debug("Fetching async state in bulk") try: self.data = await self.hass.async_add_executor_job( self._nhc.list_actions_raw ) self.available = True except OSError as ex: _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) self.available = False def get_state(self, aid): """Find and filter state based on action id.""" for state in self.data: if state["id"] == aid: return state["value1"] != 0 _LOGGER.error("Failed to retrieve state off unknown light")
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/niko_home_control/light.py
"""Component that will help set the Microsoft face for verify processing.""" import logging import voluptuous as vol from homeassistant.components.image_processing import ( ATTR_CONFIDENCE, CONF_CONFIDENCE, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA, ImageProcessingFaceEntity, ) from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE from homeassistant.const import ATTR_NAME from homeassistant.core import split_entity_id from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_GROUP = "group" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_GROUP): cv.slugify}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Microsoft Face identify platform.""" api = hass.data[DATA_MICROSOFT_FACE] face_group = config[CONF_GROUP] confidence = config[CONF_CONFIDENCE] entities = [] for camera in config[CONF_SOURCE]: entities.append( MicrosoftFaceIdentifyEntity( camera[CONF_ENTITY_ID], api, face_group, confidence, camera.get(CONF_NAME), ) ) async_add_entities(entities) class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity): """Representation of the Microsoft Face API entity for identify.""" def __init__(self, camera_entity, api, face_group, confidence, name=None): """Initialize the Microsoft Face API.""" super().__init__() self._api = api self._camera = camera_entity self._confidence = confidence self._face_group = face_group if name: self._name = name else: self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}" @property def confidence(self): """Return minimum confidence for send events.""" return self._confidence @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera @property def name(self): """Return the name of the entity.""" return self._name async def async_process_image(self, image): """Process image. This method is a coroutine. """ detect = [] try: face_data = await self._api.call_api("post", "detect", image, binary=True) if face_data: face_ids = [data["faceId"] for data in face_data] detect = await self._api.call_api( "post", "identify", {"faceIds": face_ids, "personGroupId": self._face_group}, ) except HomeAssistantError as err: _LOGGER.error("Can't process image on Microsoft face: %s", err) return # Parse data known_faces = [] total = 0 for face in detect: total += 1 if not face["candidates"]: continue data = face["candidates"][0] name = "" for s_name, s_id in self._api.store[self._face_group].items(): if data["personId"] == s_id: name = s_name break known_faces.append( {ATTR_NAME: name, ATTR_CONFIDENCE: data["confidence"] * 100} ) self.async_process_faces(known_faces, total)
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/microsoft_face_identify/image_processing.py
"""Support for Huawei LTE sensors.""" import logging import re from typing import Optional import attr from homeassistant.components.sensor import ( DEVICE_CLASS_SIGNAL_STRENGTH, DOMAIN as SENSOR_DOMAIN, ) from homeassistant.const import CONF_URL, DATA_BYTES, STATE_UNKNOWN, TIME_SECONDS from . import HuaweiLteBaseEntity from .const import ( DOMAIN, KEY_DEVICE_INFORMATION, KEY_DEVICE_SIGNAL, KEY_MONITORING_MONTH_STATISTICS, KEY_MONITORING_STATUS, KEY_MONITORING_TRAFFIC_STATISTICS, KEY_NET_CURRENT_PLMN, KEY_NET_NET_MODE, KEY_SMS_SMS_COUNT, SENSOR_KEYS, ) _LOGGER = logging.getLogger(__name__) SENSOR_META = { KEY_DEVICE_INFORMATION: dict( include=re.compile(r"^WanIP.*Address$", re.IGNORECASE) ), (KEY_DEVICE_INFORMATION, "WanIPAddress"): dict( name="WAN IP address", icon="mdi:ip", enabled_default=True ), (KEY_DEVICE_INFORMATION, "WanIPv6Address"): dict( name="WAN IPv6 address", icon="mdi:ip" ), (KEY_DEVICE_SIGNAL, "band"): dict(name="Band"), (KEY_DEVICE_SIGNAL, "cell_id"): dict(name="Cell ID"), (KEY_DEVICE_SIGNAL, "lac"): dict(name="LAC", icon="mdi:map-marker"), (KEY_DEVICE_SIGNAL, "mode"): dict( name="Mode", formatter=lambda x: ({"0": "2G", "2": "3G", "7": "4G"}.get(x, "Unknown"), None), ), (KEY_DEVICE_SIGNAL, "pci"): dict(name="PCI"), (KEY_DEVICE_SIGNAL, "rsrq"): dict( name="RSRQ", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrq.php icon=lambda x: (x is None or x < -11) and "mdi:signal-cellular-outline" or x < -8 and "mdi:signal-cellular-1" or x < -5 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rsrp"): dict( name="RSRP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrp.php icon=lambda x: (x is None or x < -110) and "mdi:signal-cellular-outline" or x < -95 and "mdi:signal-cellular-1" or x < -80 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rssi"): dict( name="RSSI", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://eyesaas.com/wi-fi-signal-strength/ icon=lambda x: (x is None or x < -80) and "mdi:signal-cellular-outline" or x < -70 and "mdi:signal-cellular-1" or x < -60 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "sinr"): dict( name="SINR", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/sinr.php icon=lambda x: (x is None or x < 0) and "mdi:signal-cellular-outline" or x < 5 and "mdi:signal-cellular-1" or x < 10 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rscp"): dict( name="RSCP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/RSCP icon=lambda x: (x is None or x < -95) and "mdi:signal-cellular-outline" or x < -85 and "mdi:signal-cellular-1" or x < -75 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), (KEY_DEVICE_SIGNAL, "ecio"): dict( name="EC/IO", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/EC/IO icon=lambda x: (x is None or x < -20) and "mdi:signal-cellular-outline" or x < -10 and "mdi:signal-cellular-1" or x < -6 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), KEY_MONITORING_MONTH_STATISTICS: dict( exclude=re.compile(r"^month(duration|lastcleartime)$", re.IGNORECASE) ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthDownload"): dict( name="Current month download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthUpload"): dict( 
name="Current month upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_MONITORING_STATUS: dict( include=re.compile( r"^(currentwifiuser|(primary|secondary).*dns)$", re.IGNORECASE ) ), (KEY_MONITORING_STATUS, "CurrentWifiUser"): dict( name="WiFi clients connected", icon="mdi:wifi" ), (KEY_MONITORING_STATUS, "PrimaryDns"): dict( name="Primary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryDns"): dict( name="Secondary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "PrimaryIPv6Dns"): dict( name="Primary IPv6 DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryIPv6Dns"): dict( name="Secondary IPv6 DNS server", icon="mdi:ip" ), KEY_MONITORING_TRAFFIC_STATISTICS: dict( exclude=re.compile(r"^showtraffic$", re.IGNORECASE) ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentConnectTime"): dict( name="Current connection duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentDownload"): dict( name="Current connection download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentUpload"): dict( name="Current connection upload", unit=DATA_BYTES, icon="mdi:upload" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalConnectTime"): dict( name="Total connected duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalDownload"): dict( name="Total download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalUpload"): dict( name="Total upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_NET_CURRENT_PLMN: dict(exclude=re.compile(r"^(Rat|ShortName)$", re.IGNORECASE)), (KEY_NET_CURRENT_PLMN, "State"): dict( name="Operator search mode", formatter=lambda x: ({"0": "Auto", "1": "Manual"}.get(x, "Unknown"), None), ), (KEY_NET_CURRENT_PLMN, "FullName"): dict(name="Operator name",), (KEY_NET_CURRENT_PLMN, "Numeric"): dict(name="Operator code",), KEY_NET_NET_MODE: dict(include=re.compile(r"^NetworkMode$", re.IGNORECASE)), (KEY_NET_NET_MODE, "NetworkMode"): dict( name="Preferred mode", formatter=lambda x: ( { "00": "4G/3G/2G", "01": "2G", "02": "3G", "03": "4G", "0301": "4G/2G", "0302": "4G/3G", "0201": "3G/2G", }.get(x, "Unknown"), None, ), ), (KEY_SMS_SMS_COUNT, "LocalUnread"): dict( name="SMS unread", icon="mdi:email-receive", ), } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up from config entry.""" router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]] sensors = [] for key in SENSOR_KEYS: items = router.data.get(key) if not items: continue key_meta = SENSOR_META.get(key) if key_meta: include = key_meta.get("include") if include: items = filter(include.search, items) exclude = key_meta.get("exclude") if exclude: items = [x for x in items if not exclude.search(x)] for item in items: sensors.append( HuaweiLteSensor(router, key, item, SENSOR_META.get((key, item), {})) ) async_add_entities(sensors, True) def format_default(value): """Format value.""" unit = None if value is not None: # Clean up value and infer unit, e.g. 
-71dBm, 15 dB match = re.match( r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$", str(value) ) if match: try: value = float(match.group("value")) unit = match.group("unit") except ValueError: pass return value, unit @attr.s class HuaweiLteSensor(HuaweiLteBaseEntity): """Huawei LTE sensor entity.""" key: str = attr.ib() item: str = attr.ib() meta: dict = attr.ib() _state = attr.ib(init=False, default=STATE_UNKNOWN) _unit: str = attr.ib(init=False) async def async_added_to_hass(self): """Subscribe to needed data on add.""" await super().async_added_to_hass() self.router.subscriptions[self.key].add(f"{SENSOR_DOMAIN}/{self.item}") async def async_will_remove_from_hass(self): """Unsubscribe from needed data on remove.""" await super().async_will_remove_from_hass() self.router.subscriptions[self.key].remove(f"{SENSOR_DOMAIN}/{self.item}") @property def _entity_name(self) -> str: return self.meta.get("name", self.item) @property def _device_unique_id(self) -> str: return f"{self.key}.{self.item}" @property def state(self): """Return sensor state.""" return self._state @property def device_class(self) -> Optional[str]: """Return sensor device class.""" return self.meta.get("device_class") @property def unit_of_measurement(self): """Return sensor's unit of measurement.""" return self.meta.get("unit", self._unit) @property def icon(self): """Return icon for sensor.""" icon = self.meta.get("icon") if callable(icon): return icon(self.state) return icon @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return bool(self.meta.get("enabled_default")) async def async_update(self): """Update state.""" try: value = self.router.data[self.key][self.item] except KeyError: _LOGGER.debug("%s[%s] not in data", self.key, self.item) self._available = False return self._available = True formatter = self.meta.get("formatter") if not callable(formatter): formatter = format_default self._state, self._unit = formatter(value) async def async_setup_platform(*args, **kwargs): """Old no longer used way to set up Huawei LTE sensors.""" _LOGGER.warning( "Loading and configuring as a platform is no longer supported or " "required, convert to enabling/disabling available entities" )
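# Quick illustration of the value/unit split performed by format_default
# above. The sample strings are made up for demonstration; the regex is the
# one used in the file.
import re

SPLIT_RE = re.compile(r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$")

for raw, expected in [
    ("-71dBm", (-71.0, "dBm")),  # unit fused directly onto the number
    ("15 dB", (15.0, "dB")),  # unit separated by whitespace
    (">=-3dB", (-3.0, "dB")),  # leading comparator is dropped from the value
]:
    match = SPLIT_RE.match(raw)
    assert match is not None
    assert (float(match.group("value")), match.group("unit")) == expected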
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/huawei_lte/sensor.py
"""Support for Yamaha MusicCast Receivers.""" import logging import socket import pymusiccast import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, ) from homeassistant.const import ( CONF_HOST, CONF_PORT, STATE_IDLE, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, ) import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) SUPPORTED_FEATURES = ( SUPPORT_PLAY | SUPPORT_PAUSE | SUPPORT_STOP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_SELECT_SOURCE ) KNOWN_HOSTS_KEY = "data_yamaha_musiccast" INTERVAL_SECONDS = "interval_seconds" DEFAULT_PORT = 5005 DEFAULT_INTERVAL = 480 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(INTERVAL_SECONDS, default=DEFAULT_INTERVAL): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Yamaha MusicCast platform.""" known_hosts = hass.data.get(KNOWN_HOSTS_KEY) if known_hosts is None: known_hosts = hass.data[KNOWN_HOSTS_KEY] = [] _LOGGER.debug("known_hosts: %s", known_hosts) host = config.get(CONF_HOST) port = config.get(CONF_PORT) interval = config.get(INTERVAL_SECONDS) # Get IP of host to prevent duplicates try: ipaddr = socket.gethostbyname(host) except (OSError) as error: _LOGGER.error("Could not communicate with %s:%d: %s", host, port, error) return if [item for item in known_hosts if item[0] == ipaddr]: _LOGGER.warning("Host %s:%d already registered", host, port) return if [item for item in known_hosts if item[1] == port]: _LOGGER.warning("Port %s:%d already registered", host, port) return reg_host = (ipaddr, port) known_hosts.append(reg_host) try: receiver = pymusiccast.McDevice(ipaddr, udp_port=port, mc_interval=interval) except pymusiccast.exceptions.YMCInitError as err: _LOGGER.error(err) receiver = None if receiver: for zone in receiver.zones: _LOGGER.debug("Receiver: %s / Port: %d / Zone: %s", receiver, port, zone) add_entities([YamahaDevice(receiver, receiver.zones[zone])], True) else: known_hosts.remove(reg_host) class YamahaDevice(MediaPlayerEntity): """Representation of a Yamaha MusicCast device.""" def __init__(self, recv, zone): """Initialize the Yamaha MusicCast device.""" self._recv = recv self._name = recv.name self._source = None self._source_list = [] self._zone = zone self.mute = False self.media_status = None self.media_status_received = None self.power = STATE_UNKNOWN self.status = STATE_UNKNOWN self.volume = 0 self.volume_max = 0 self._recv.set_yamaha_device(self) self._zone.set_yamaha_device(self) @property def name(self): """Return the name of the device.""" return f"{self._name} ({self._zone.zone_id})" @property def state(self): """Return the state of the device.""" if self.power == STATE_ON and self.status != STATE_UNKNOWN: return self.status return self.power @property def should_poll(self): """Push an update after each command.""" return True @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self.mute @property def volume_level(self): """Volume level of the media 
player (0..1).""" return self.volume @property def supported_features(self): """Flag of features that are supported.""" return SUPPORTED_FEATURES @property def source(self): """Return the current input source.""" return self._source @property def source_list(self): """List of available input sources.""" return self._source_list @source_list.setter def source_list(self, value): """Set source_list attribute.""" self._source_list = value @property def media_content_type(self): """Return the media content type.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" return self.media_status.media_duration if self.media_status else None @property def media_image_url(self): """Image url of current playing media.""" return self.media_status.media_image_url if self.media_status else None @property def media_artist(self): """Artist of current playing media, music track only.""" return self.media_status.media_artist if self.media_status else None @property def media_album(self): """Album of current playing media, music track only.""" return self.media_status.media_album if self.media_status else None @property def media_track(self): """Track number of current playing media, music track only.""" return self.media_status.media_track if self.media_status else None @property def media_title(self): """Title of current playing media.""" return self.media_status.media_title if self.media_status else None @property def media_position(self): """Position of current playing media in seconds.""" if self.media_status and self.state in [ STATE_PLAYING, STATE_PAUSED, STATE_IDLE, ]: return self.media_status.media_position @property def media_position_updated_at(self): """When was the position of the current playing media valid. Returns value from homeassistant.util.dt.utcnow(). 
""" return self.media_status_received if self.media_status else None def update(self): """Get the latest details from the device.""" _LOGGER.debug("update: %s", self.entity_id) self._recv.update_status() self._zone.update_status() def update_hass(self): """Push updates to Home Assistant.""" if self.entity_id: _LOGGER.debug("update_hass: pushing updates") self.schedule_update_ha_state() return True def turn_on(self): """Turn on specified media player or all.""" _LOGGER.debug("Turn device: on") self._zone.set_power(True) def turn_off(self): """Turn off specified media player or all.""" _LOGGER.debug("Turn device: off") self._zone.set_power(False) def media_play(self): """Send the media player the command for play/pause.""" _LOGGER.debug("Play") self._recv.set_playback("play") def media_pause(self): """Send the media player the command for pause.""" _LOGGER.debug("Pause") self._recv.set_playback("pause") def media_stop(self): """Send the media player the stop command.""" _LOGGER.debug("Stop") self._recv.set_playback("stop") def media_previous_track(self): """Send the media player the command for prev track.""" _LOGGER.debug("Previous") self._recv.set_playback("previous") def media_next_track(self): """Send the media player the command for next track.""" _LOGGER.debug("Next") self._recv.set_playback("next") def mute_volume(self, mute): """Send mute command.""" _LOGGER.debug("Mute volume: %s", mute) self._zone.set_mute(mute) def set_volume_level(self, volume): """Set volume level, range 0..1.""" _LOGGER.debug("Volume level: %.2f / %d", volume, volume * self.volume_max) self._zone.set_volume(volume * self.volume_max) def select_source(self, source): """Send the media player the command to select input source.""" _LOGGER.debug("select_source: %s", source) self.status = STATE_UNKNOWN self._zone.set_input(source) def new_media_status(self, status): """Handle updates of the media status.""" _LOGGER.debug("new media_status arrived") self.media_status = status self.media_status_received = dt_util.utcnow()
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/yamaha_musiccast/media_player.py
"""Support for Ubee router.""" import logging from pyubee import Ubee import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_MODEL = "model" DEFAULT_MODEL = "detect" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_MODEL, default=DEFAULT_MODEL): vol.Any( "EVW32C-0N", "EVW320B", "EVW321B", "EVW3200-Wifi", "EVW3226@UPC", "DVW32CB", "DDW36C", ), } ) def get_scanner(hass, config): """Validate the configuration and return a Ubee scanner.""" info = config[DOMAIN] host = info[CONF_HOST] username = info[CONF_USERNAME] password = info[CONF_PASSWORD] model = info[CONF_MODEL] ubee = Ubee(host, username, password, model) if not ubee.login(): _LOGGER.error("Login failed") return None scanner = UbeeDeviceScanner(ubee) return scanner class UbeeDeviceScanner(DeviceScanner): """This class queries a wireless Ubee router.""" def __init__(self, ubee): """Initialize the Ubee scanner.""" self._ubee = ubee self._mac2name = {} def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" devices = self._get_connected_devices() self._mac2name = devices return list(devices) def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" return self._mac2name.get(device) def _get_connected_devices(self): """List connected devices with pyubee.""" if not self._ubee.session_active(): self._ubee.login() return self._ubee.get_connected_devices()
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/ubee/device_tracker.py
"""Component to interface with switches that can be controlled remotely.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, ) from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.loader import bind_hass # mypy: allow-untyped-defs, no-check-untyped-defs DOMAIN = "switch" SCAN_INTERVAL = timedelta(seconds=30) ENTITY_ID_FORMAT = DOMAIN + ".{}" ATTR_TODAY_ENERGY_KWH = "today_energy_kwh" ATTR_CURRENT_POWER_W = "current_power_w" MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) PROP_TO_ATTR = { "current_power_w": ATTR_CURRENT_POWER_W, "today_energy_kwh": ATTR_TODAY_ENERGY_KWH, } DEVICE_CLASS_OUTLET = "outlet" DEVICE_CLASS_SWITCH = "switch" DEVICE_CLASSES = [DEVICE_CLASS_OUTLET, DEVICE_CLASS_SWITCH] DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) _LOGGER = logging.getLogger(__name__) @bind_hass def is_on(hass, entity_id): """Return if the switch is on based on the statemachine. Async friendly. """ return hass.states.is_state(entity_id, STATE_ON) async def async_setup(hass, config): """Track states and offer events for switches.""" component = hass.data[DOMAIN] = EntityComponent( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) class SwitchEntity(ToggleEntity): """Representation of a switch.""" @property def current_power_w(self): """Return the current power usage in W.""" return None @property def today_energy_kwh(self): """Return the today total energy usage in kWh.""" return None @property def is_standby(self): """Return true if device is in standby.""" return None @property def state_attributes(self): """Return the optional state attributes.""" data = {} for prop, attr in PROP_TO_ATTR.items(): value = getattr(self, prop) if value is not None: data[attr] = value return data @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return None class SwitchDevice(SwitchEntity): """Representation of a switch (for backwards compatibility).""" def __init_subclass__(cls, **kwargs): """Print deprecation warning.""" super().__init_subclass__(**kwargs) _LOGGER.warning( "SwitchDevice is deprecated, modify %s to extend SwitchEntity", cls.__name__, )
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
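Aside: a quick arithmetic check, not part of the test file, of the figures derived in setup_method for the sample series (pure Python, no Home Assistant needed):

values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6]
assert round(sum(values) / len(values), 2) == 10.77  # mean: 96.9 / 9
assert round(values[-1] - values[0], 2) == -11       # change: 6 - 17
assert round(-11 / (len(values) - 1), 2) == -1.38    # average_change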
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/switch/__init__.py
"""Support for Lutron Caseta shades.""" import logging from homeassistant.components.cover import ( ATTR_POSITION, DOMAIN, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, CoverEntity, ) from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Lutron Caseta cover platform. Adds shades from the Caseta bridge associated with the config_entry as cover entities. """ entities = [] bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id] cover_devices = bridge.get_devices_by_domain(DOMAIN) for cover_device in cover_devices: entity = LutronCasetaCover(cover_device, bridge) entities.append(entity) async_add_entities(entities, True) class LutronCasetaCover(LutronCasetaDevice, CoverEntity): """Representation of a Lutron shade.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION @property def is_closed(self): """Return if the cover is closed.""" return self._device["current_state"] < 1 @property def current_cover_position(self): """Return the current position of cover.""" return self._device["current_state"] async def async_close_cover(self, **kwargs): """Close the cover.""" self._smartbridge.set_value(self.device_id, 0) async def async_open_cover(self, **kwargs): """Open the cover.""" self._smartbridge.set_value(self.device_id, 100) async def async_set_cover_position(self, **kwargs): """Move the shade to a specific position.""" if ATTR_POSITION in kwargs: position = kwargs[ATTR_POSITION] self._smartbridge.set_value(self.device_id, position) async def async_update(self): """Call when forcing a refresh of the device.""" self._device = self._smartbridge.get_device_by_id(self.device_id) _LOGGER.debug(self._device)
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/lutron_caseta/cover.py
"""Support for (EMEA/EU-based) Honeywell TCC climate systems. Such systems include evohome, Round Thermostat, and others. """ from datetime import datetime as dt, timedelta import logging import re from typing import Any, Dict, Optional, Tuple import aiohttp.client_exceptions import evohomeasync import evohomeasync2 import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME, HTTP_SERVICE_UNAVAILABLE, HTTP_TOO_MANY_REQUESTS, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util from .const import DOMAIN, EVO_FOLLOW, GWS, STORAGE_KEY, STORAGE_VER, TCS, UTC_OFFSET _LOGGER = logging.getLogger(__name__) ACCESS_TOKEN = "access_token" ACCESS_TOKEN_EXPIRES = "access_token_expires" REFRESH_TOKEN = "refresh_token" USER_DATA = "user_data" CONF_LOCATION_IDX = "location_idx" SCAN_INTERVAL_DEFAULT = timedelta(seconds=300) SCAN_INTERVAL_MINIMUM = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_LOCATION_IDX, default=0): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=SCAN_INTERVAL_DEFAULT ): vol.All(cv.time_period, vol.Range(min=SCAN_INTERVAL_MINIMUM)), } ) }, extra=vol.ALLOW_EXTRA, ) ATTR_SYSTEM_MODE = "mode" ATTR_DURATION_DAYS = "period" ATTR_DURATION_HOURS = "duration" ATTR_ZONE_TEMP = "setpoint" ATTR_DURATION_UNTIL = "duration" SVC_REFRESH_SYSTEM = "refresh_system" SVC_SET_SYSTEM_MODE = "set_system_mode" SVC_RESET_SYSTEM = "reset_system" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SVC_RESET_ZONE_OVERRIDE = "clear_zone_override" RESET_ZONE_OVERRIDE_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_TEMP): vol.All( vol.Coerce(float), vol.Range(min=4.0, max=35.0) ), vol.Optional(ATTR_DURATION_UNTIL): vol.All( cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1)) ), } ) # system mode schemas are built dynamically, below def _dt_local_to_aware(dt_naive: dt) -> dt: dt_aware = dt_util.now() + (dt_naive - dt.now()) if dt_aware.microsecond >= 500000: dt_aware += timedelta(seconds=1) return dt_aware.replace(microsecond=0) def _dt_aware_to_naive(dt_aware: dt) -> dt: dt_naive = dt.now() + (dt_aware - dt_util.now()) if dt_naive.microsecond >= 500000: dt_naive += timedelta(seconds=1) return dt_naive.replace(microsecond=0) def convert_until(status_dict: dict, until_key: str) -> None: """Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat.""" if until_key in status_dict: # only present for certain modes dt_utc_naive = dt_util.parse_datetime(status_dict[until_key]) status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat() def convert_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: """Recursively convert a dict's keys to snake_case.""" def convert_key(key: str) -> str: """Convert a string to snake_case.""" string = re.sub(r"[\-\.\s]", "_", str(key)) 
return (string[0]).lower() + re.sub( r"[A-Z]", lambda matched: f"_{matched.group(0).lower()}", string[1:] ) return { (convert_key(k) if isinstance(k, str) else k): ( convert_dict(v) if isinstance(v, dict) else v ) for k, v in dictionary.items() } def _handle_exception(err) -> bool: """Return False if the exception can't be ignored.""" try: raise err except evohomeasync2.AuthenticationError: _LOGGER.error( "Failed to authenticate with the vendor's server. " "Check your network and the vendor's service status page. " "Also check that your username and password are correct. " "Message is: %s", err, ) return False except aiohttp.ClientConnectionError: # this appears to be a common occurrence with the vendor's servers _LOGGER.warning( "Unable to connect with the vendor's server. " "Check your network and the vendor's service status page. " "Message is: %s", err, ) return False except aiohttp.ClientResponseError: if err.status == HTTP_SERVICE_UNAVAILABLE: _LOGGER.warning( "The vendor says their server is currently unavailable. " "Check the vendor's service status page" ) return False if err.status == HTTP_TOO_MANY_REQUESTS: _LOGGER.warning( "The vendor's API rate limit has been exceeded. " "If this message persists, consider increasing the %s", CONF_SCAN_INTERVAL, ) return False raise # we don't expect/handle any other Exceptions async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" async def load_auth_tokens(store) -> Tuple[Dict, Optional[Dict]]: app_storage = await store.async_load() tokens = dict(app_storage if app_storage else {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be be corrupt await store.async_save({}) return ({}, None) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None: tokens[ACCESS_TOKEN_EXPIRES] = _dt_aware_to_naive( dt_util.parse_datetime(tokens[ACCESS_TOKEN_EXPIRES]) ) user_data = tokens.pop(USER_DATA, None) return (tokens, user_data) store = hass.helpers.storage.Store(STORAGE_VER, STORAGE_KEY) tokens, user_data = await load_auth_tokens(store) client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], **tokens, session=async_get_clientsession(hass), ) try: await client_v2.login() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) return False finally: config[DOMAIN][CONF_PASSWORD] = "REDACTED" loc_idx = config[DOMAIN][CONF_LOCATION_IDX] try: loc_config = client_v2.installation_info[loc_idx] except IndexError: _LOGGER.error( "Config error: '%s' = %s, but the valid range is 0-%s. " "Unable to continue. 
Fix any configuration errors and restart HA", CONF_LOCATION_IDX, loc_idx, len(client_v2.installation_info) - 1, ) return False if _LOGGER.isEnabledFor(logging.DEBUG): _config = {"locationInfo": {"timeZone": None}, GWS: [{TCS: None}]} _config["locationInfo"]["timeZone"] = loc_config["locationInfo"]["timeZone"] _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, user_data=user_data, session=async_get_clientsession(hass), ) hass.data[DOMAIN] = {} hass.data[DOMAIN]["broker"] = broker = EvoBroker( hass, client_v2, client_v1, store, config[DOMAIN] ) await broker.save_auth_tokens() await broker.async_update() # get initial state hass.async_create_task(async_load_platform(hass, "climate", DOMAIN, {}, config)) if broker.tcs.hotwater: hass.async_create_task( async_load_platform(hass, "water_heater", DOMAIN, {}, config) ) hass.helpers.event.async_track_time_interval( broker.async_update, config[DOMAIN][CONF_SCAN_INTERVAL] ) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes. """ @verify_domain_control(hass, DOMAIN) async def force_refresh(call) -> None: """Obtain the latest state data via the vendor's RESTful API.""" await broker.async_update() @verify_domain_control(hass, DOMAIN) async def set_system_mode(call) -> None: """Set the system mode.""" payload = { "unique_id": broker.tcs.systemId, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) @verify_domain_control(hass, DOMAIN) async def set_zone_override(call) -> None: """Set the zone override (setpoint).""" entity_id = call.data[ATTR_ENTITY_ID] registry = await hass.helpers.entity_registry.async_get_registry() registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register(DOMAIN, SVC_REFRESH_SYSTEM, force_refresh) # Enumerate which operating modes are supported by this system modes = broker.config["allowedSystemModes"] # Not all systems support "AutoWithReset": register this handler only if required if [m["systemMode"] for m in modes if m["systemMode"] == "AutoWithReset"]: hass.services.async_register(DOMAIN, SVC_RESET_SYSTEM, set_system_mode) system_mode_schemas = [] modes = [m for m in modes if m["systemMode"] != "AutoWithReset"] # Permanent-only modes will use this schema perm_modes = [m["systemMode"] for m in modes if not m["canBeTemporary"]] if perm_modes: # any of: "Auto", "HeatingOff": permanent only schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)}) system_mode_schemas.append(schema) modes = [m for m in modes if m["canBeTemporary"]] # These modes are set for a number of hours (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if 
m["timingMode"] == "Duration"] if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_HOURS): vol.All( cv.time_period, vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)), ), } ) system_mode_schemas.append(schema) # These modes are set for a number of days (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if m["timingMode"] == "Period"] if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_DAYS): vol.All( cv.time_period, vol.Range(min=timedelta(days=1), max=timedelta(days=99)), ), } ) system_mode_schemas.append(schema) if system_mode_schemas: hass.services.async_register( DOMAIN, SVC_SET_SYSTEM_MODE, set_system_mode, schema=vol.Any(*system_mode_schemas), ) # The zone modes are consistent across all systems and use the same schema hass.services.async_register( DOMAIN, SVC_RESET_ZONE_OVERRIDE, set_zone_override, schema=RESET_ZONE_OVERRIDE_SCHEMA, ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_override, schema=SET_ZONE_OVERRIDE_SCHEMA, ) class EvoBroker: """Container for evohome client and data.""" def __init__(self, hass, client, client_v1, store, params) -> None: """Initialize the evohome client and its data structure.""" self.hass = hass self.client = client self.client_v1 = client_v1 self._store = store self.params = params loc_idx = params[CONF_LOCATION_IDX] self.config = client.installation_info[loc_idx][GWS][0][TCS][0] self.tcs = client.locations[loc_idx]._gateways[0]._control_systems[0] self.tcs_utc_offset = timedelta( minutes=client.locations[loc_idx].timeZone[UTC_OFFSET] ) self.temps = {} async def save_auth_tokens(self) -> None: """Save access tokens and session IDs to the store for later use.""" # evohomeasync2 uses naive/local datetimes access_token_expires = _dt_local_to_aware(self.client.access_token_expires) app_storage = {CONF_USERNAME: self.client.username} app_storage[REFRESH_TOKEN] = self.client.refresh_token app_storage[ACCESS_TOKEN] = self.client.access_token app_storage[ACCESS_TOKEN_EXPIRES] = access_token_expires.isoformat() if self.client_v1 and self.client_v1.user_data: app_storage[USER_DATA] = { "userInfo": {"userID": self.client_v1.user_data["userInfo"]["userID"]}, "sessionId": self.client_v1.user_data["sessionId"], } else: app_storage[USER_DATA] = None await self._store.async_save(app_storage) async def call_client_api(self, api_function, refresh=True) -> Any: """Call a client API.""" try: result = await api_function except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: if not _handle_exception(err): return if refresh: self.hass.helpers.event.async_call_later(1, self.async_update()) return result async def _update_v1(self, *args, **kwargs) -> None: """Get the latest high-precision temperatures of the default Location.""" def get_session_id(client_v1) -> Optional[str]: user_data = client_v1.user_data if client_v1 else None return user_data.get("sessionId") if user_data else None session_id = get_session_id(self.client_v1) try: temps = list(await self.client_v1.temperatures(force_refresh=True)) except aiohttp.ClientError as err: _LOGGER.warning( "Unable to obtain the latest high-precision temperatures. " "Check your network and the vendor's service status page. " "Proceeding with low-precision temperatures. 
" "Message is: %s", err, ) self.temps = None # these are now stale, will fall back to v2 temps else: if ( str(self.client_v1.location_id) != self.client.locations[self.params[CONF_LOCATION_IDX]].locationId ): _LOGGER.warning( "The v2 API's configured location doesn't match " "the v1 API's default location (there is more than one location), " "so the high-precision feature will be disabled" ) self.client_v1 = self.temps = None else: self.temps = {str(i["id"]): i["temp"] for i in temps} _LOGGER.debug("Temperatures = %s", self.temps) if session_id != get_session_id(self.client_v1): await self.save_auth_tokens() async def _update_v2(self, *args, **kwargs) -> None: """Get the latest modes, temperatures, setpoints of a Location.""" access_token = self.client.access_token loc_idx = self.params[CONF_LOCATION_IDX] try: status = await self.client.locations[loc_idx].status() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) else: async_dispatcher_send(self.hass, DOMAIN) _LOGGER.debug("Status = %s", status) if access_token != self.client.access_token: await self.save_auth_tokens() async def async_update(self, *args, **kwargs) -> None: """Get the latest state data of an entire Honeywell TCC Location. This includes state data for a Controller and all its child devices, such as the operating mode of the Controller and the current temp of its children (e.g. Zones, DHW controller). """ await self._update_v2() if self.client_v1: await self._update_v1() # inform the evohome devices that state data has been updated async_dispatcher_send(self.hass, DOMAIN) class EvoDevice(Entity): """Base for any evohome device. This includes the Controller, (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize the evohome entity.""" self._evo_device = evo_device self._evo_broker = evo_broker self._evo_tcs = evo_broker.tcs self._unique_id = self._name = self._icon = self._precision = None self._supported_features = None self._device_state_attrs = {} async def async_refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] in [SVC_SET_ZONE_OVERRIDE, SVC_RESET_ZONE_OVERRIDE]: await self.async_zone_svc_request(payload["service"], payload["data"]) return await self.async_tcs_svc_request(payload["service"], payload["data"]) async def async_tcs_svc_request(self, service: dict, data: dict) -> None: """Process a service request (system mode) for a controller.""" raise NotImplementedError async def async_zone_svc_request(self, service: dict, data: dict) -> None: """Process a service request (setpoint override) for a zone.""" raise NotImplementedError @property def should_poll(self) -> bool: """Evohome entities should not be polled.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name of the evohome entity.""" return self._name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the evohome-specific state attributes.""" status = self._device_state_attrs if "systemModeStatus" in status: convert_until(status["systemModeStatus"], "timeUntil") if "setpointStatus" in status: convert_until(status["setpointStatus"], "until") if "stateStatus" in status: convert_until(status["stateStatus"], "until") return {"status": 
convert_dict(status)} @property def icon(self) -> str: """Return the icon to use in the frontend UI.""" return self._icon @property def supported_features(self) -> int: """Get the flag of supported features of the device.""" return self._supported_features async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" async_dispatcher_connect(self.hass, DOMAIN, self.async_refresh) @property def precision(self) -> float: """Return the temperature precision to use in the frontend UI.""" return self._precision @property def temperature_unit(self) -> str: """Return the temperature unit to use in the frontend UI.""" return TEMP_CELSIUS class EvoChild(EvoDevice): """Base for any evohome child. This includes (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize a evohome Controller (hub).""" super().__init__(evo_broker, evo_device) self._schedule = {} self._setpoints = {} @property def current_temperature(self) -> Optional[float]: """Return the current temperature of a Zone.""" if self._evo_broker.temps: if self._evo_broker.temps[self._evo_device.zoneId] != 128: return self._evo_broker.temps[self._evo_device.zoneId] if self._evo_device.temperatureStatus["isAvailable"]: return self._evo_device.temperatureStatus["temperature"] @property def setpoints(self) -> Dict[str, Any]: """Return the current/next setpoints from the schedule. Only Zones & DHW controllers (but not the TCS) can have schedules. """ def _dt_evo_to_aware(dt_naive: dt, utc_offset: timedelta) -> dt: dt_aware = dt_naive.replace(tzinfo=dt_util.UTC) - utc_offset return dt_util.as_local(dt_aware) if not self._schedule["DailySchedules"]: return {} # no schedule {'DailySchedules': []}, so no scheduled setpoints day_time = dt_util.now() day_of_week = int(day_time.strftime("%w")) # 0 is Sunday time_of_day = day_time.strftime("%H:%M:%S") try: # Iterate today's switchpoints until past the current time of day... day = self._schedule["DailySchedules"][day_of_week] sp_idx = -1 # last switchpoint of the day before for i, tmp in enumerate(day["Switchpoints"]): if time_of_day > tmp["TimeOfDay"]: sp_idx = i # current setpoint else: break # Did the current SP start yesterday? Does the next start SP tomorrow? 
this_sp_day = -1 if sp_idx == -1 else 0 next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0 for key, offset, idx in [ ("this", this_sp_day, sp_idx), ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)), ]: sp_date = (day_time + timedelta(days=offset)).strftime("%Y-%m-%d") day = self._schedule["DailySchedules"][(day_of_week + offset) % 7] switchpoint = day["Switchpoints"][idx] dt_aware = _dt_evo_to_aware( dt_util.parse_datetime(f"{sp_date}T{switchpoint['TimeOfDay']}"), self._evo_broker.tcs_utc_offset, ) self._setpoints[f"{key}_sp_from"] = dt_aware.isoformat() try: self._setpoints[f"{key}_sp_temp"] = switchpoint["heatSetpoint"] except KeyError: self._setpoints[f"{key}_sp_state"] = switchpoint["DhwState"] except IndexError: self._setpoints = {} _LOGGER.warning( "Failed to get setpoints, report as an issue if this error persists", exc_info=True, ) return self._setpoints async def _update_schedule(self) -> None: """Get the latest schedule, if any.""" if "DailySchedules" in self._schedule and not self._schedule["DailySchedules"]: if not self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: return # avoid unnecessary I/O - there's nothing to update self._schedule = await self._evo_broker.call_client_api( self._evo_device.schedule(), refresh=False ) _LOGGER.debug("Schedule['%s'] = %s", self.name, self._schedule) async def async_update(self) -> None: """Get the latest state data.""" next_sp_from = self._setpoints.get("next_sp_from", "2000-01-01T00:00:00+00:00") if dt_util.now() >= dt_util.parse_datetime(next_sp_from): await self._update_schedule() # no schedule, or it's out-of-date self._device_state_attrs = {"setpoints": self.setpoints}
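Aside: an illustrative run, with an invented payload, of the convert_dict helper defined near the top of this module; camelCase and dashed keys become snake_case, recursively (the import path assumes the helper stays module-level):

from homeassistant.components.evohome import convert_dict

sample = {"systemModeStatus": {"timeUntil": None}, "is-permanent": True}
assert convert_dict(sample) == {
    "system_mode_status": {"time_until": None},
    "is_permanent": True,
}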
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/evohome/__init__.py
"""Support for particulate matter sensors connected to a serial port.""" import logging from pmsensor import serial_pm as pm import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_BRAND = "brand" CONF_SERIAL_DEVICE = "serial_device" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRAND): cv.string, vol.Required(CONF_SERIAL_DEVICE): cv.string, vol.Optional(CONF_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available PM sensors.""" try: coll = pm.PMDataCollector( config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)] ) except KeyError: _LOGGER.error( "Brand %s not supported\n supported brands: %s", config.get(CONF_BRAND), pm.SUPPORTED_SENSORS.keys(), ) return except OSError as err: _LOGGER.error( "Could not open serial connection to %s (%s)", config.get(CONF_SERIAL_DEVICE), err, ) return dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is not None: name = "{} PM{}".format(config.get(CONF_NAME), pmname) else: name = f"PM{pmname}" dev.append(ParticulateMatterSensor(coll, name, pmname)) add_entities(dev) class ParticulateMatterSensor(Entity): """Representation of an Particulate matter sensor.""" def __init__(self, pmDataCollector, name, pmname): """Initialize a new PM sensor.""" self._name = name self._pmname = pmname self._state = None self._collector = pmDataCollector @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER def update(self): """Read from sensor and update the state.""" _LOGGER.debug("Reading data from PM sensor") try: self._state = self._collector.read_data()[self._pmname] except KeyError: _LOGGER.error("Could not read PM%s value", self._pmname)
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/serial_pm/sensor.py
"""Support for Luftdaten sensors.""" import logging from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from . import ( DATA_LUFTDATEN, DATA_LUFTDATEN_CLIENT, DEFAULT_ATTRIBUTION, DOMAIN, SENSORS, TOPIC_UPDATE, ) from .const import ATTR_SENSOR_ID _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up a Luftdaten sensor based on a config entry.""" luftdaten = hass.data[DOMAIN][DATA_LUFTDATEN_CLIENT][entry.entry_id] sensors = [] for sensor_type in luftdaten.sensor_conditions: try: name, icon, unit = SENSORS[sensor_type] except KeyError: _LOGGER.debug("Unknown sensor value type: %s", sensor_type) continue sensors.append( LuftdatenSensor( luftdaten, sensor_type, name, icon, unit, entry.data[CONF_SHOW_ON_MAP] ) ) async_add_entities(sensors, True) class LuftdatenSensor(Entity): """Implementation of a Luftdaten sensor.""" def __init__(self, luftdaten, sensor_type, name, icon, unit, show): """Initialize the Luftdaten sensor.""" self._async_unsub_dispatcher_connect = None self.luftdaten = luftdaten self._icon = icon self._name = name self._data = None self.sensor_type = sensor_type self._unit_of_measurement = unit self._show_on_map = show self._attrs = {} @property def icon(self): """Return the icon.""" return self._icon @property def state(self): """Return the state of the device.""" if self._data is not None: return self._data[self.sensor_type] @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def should_poll(self): """Disable polling.""" return False @property def unique_id(self) -> str: """Return a unique, friendly identifier for this entity.""" if self._data is not None: return f"{self._data['sensor_id']}_{self.sensor_type}" @property def device_state_attributes(self): """Return the state attributes.""" self._attrs[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION if self._data is not None: self._attrs[ATTR_SENSOR_ID] = self._data["sensor_id"] on_map = ATTR_LATITUDE, ATTR_LONGITUDE no_map = "lat", "long" lat_format, lon_format = on_map if self._show_on_map else no_map try: self._attrs[lon_format] = self._data["longitude"] self._attrs[lat_format] = self._data["latitude"] return self._attrs except KeyError: return async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.async_schedule_update_ha_state(True) self._async_unsub_dispatcher_connect = async_dispatcher_connect( self.hass, TOPIC_UPDATE, update ) async def async_will_remove_from_hass(self): """Disconnect dispatcher listener when removed.""" if self._async_unsub_dispatcher_connect: self._async_unsub_dispatcher_connect() async def async_update(self): """Get the latest data and update the state.""" try: self._data = self.luftdaten.data[DATA_LUFTDATEN] except KeyError: return
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/luftdaten/sensor.py
"""Support for LIRC devices.""" # pylint: disable=no-member, import-error import logging import threading import time import lirc import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP _LOGGER = logging.getLogger(__name__) BUTTON_NAME = "button_name" DOMAIN = "lirc" EVENT_IR_COMMAND_RECEIVED = "ir_command_received" ICON = "mdi:remote" CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the LIRC capability.""" # blocking=True gives unexpected behavior (multiple responses for 1 press) # also by not blocking, we allow hass to shut down the thread gracefully # on exit. lirc.init("home-assistant", blocking=False) lirc_interface = LircInterface(hass) def _start_lirc(_event): lirc_interface.start() def _stop_lirc(_event): lirc_interface.stopped.set() hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_lirc) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_lirc) return True class LircInterface(threading.Thread): """ This interfaces with the lirc daemon to read IR commands. When using lirc in blocking mode, sometimes repeated commands get produced in the next read of a command so we use a thread here to just wait around until a non-empty response is obtained from lirc. """ def __init__(self, hass): """Construct a LIRC interface object.""" threading.Thread.__init__(self) self.daemon = True self.stopped = threading.Event() self.hass = hass def run(self): """Run the loop of the LIRC interface thread.""" _LOGGER.debug("LIRC interface thread started") while not self.stopped.isSet(): try: code = lirc.nextcode() # list; empty if no buttons pressed except lirc.NextCodeError: _LOGGER.warning("Error reading next code from LIRC") code = None # interpret result from python-lirc if code: code = code[0] _LOGGER.info("Got new LIRC code %s", code) self.hass.bus.fire(EVENT_IR_COMMAND_RECEIVED, {BUTTON_NAME: code}) else: time.sleep(0.2) lirc.deinit() _LOGGER.debug("LIRC interface thread stopped")
"""The test for the statistics sensor platform.""" from datetime import datetime, timedelta import statistics import unittest import pytest from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS from homeassistant.setup import setup_component from homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import ( fire_time_changed, get_test_home_assistant, init_recorder_component, ) from tests.components.recorder.common import wait_recording_done @pytest.fixture(autouse=True) def mock_legacy_time(legacy_patchable_time): """Make time patchable for all the tests.""" yield class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = round(self.values[-1] - self.values[0], 2) self.average_change = round(self.change / (len(self.values) - 1), 2) self.change_rate = round(self.change / (60 * (self.count - 1)), 2) self.addCleanup(self.hass.stop) def test_binary_sensor_source(self): """Test if source is a sensor.""" values = ["on", "off", "on", "off", "on", "off", "on"] assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "binary_sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in values: self.hass.states.set("binary_sensor.test_monitored", value) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(len(values)) == state.state def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state assert self.min == state.attributes.get("min_value") assert self.max == state.attributes.get("max_value") assert self.variance == state.attributes.get("variance") assert self.median == state.attributes.get("median") assert self.deviation == state.attributes.get("standard_deviation") assert self.mean == state.attributes.get("mean") assert self.count == state.attributes.get("count") assert self.total == state.attributes.get("total") assert TEMP_CELSIUS == state.attributes.get("unit_of_measurement") assert self.change == state.attributes.get("change") assert self.average_change == state.attributes.get("average_change") def test_sampling_size(self): """Test rotation.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 5, } }, ) self.hass.block_till_done() self.hass.start() 
self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_sampling_size_1(self): """Test validity of stats requiring only one sample.""" assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 1, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values[-3:]: # just the last 3 will do self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() state = self.hass.states.get("sensor.test") # require only one data point assert self.values[-1] == state.attributes.get("min_value") assert self.values[-1] == state.attributes.get("max_value") assert self.values[-1] == state.attributes.get("mean") assert self.values[-1] == state.attributes.get("median") assert self.values[-1] == state.attributes.get("total") assert 0 == state.attributes.get("change") assert 0 == state.attributes.get("average_change") # require at least two data points assert STATE_UNKNOWN == state.attributes.get("variance") assert STATE_UNKNOWN == state.attributes.get("standard_deviation") def test_max_age(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert 6 == state.attributes.get("min_value") assert 14 == state.attributes.get("max_value") def test_max_age_without_sensor_change(self): """Test value deprecation.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "max_age": {"minutes": 3}, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 30 seconds later mock_data["return_time"] += timedelta(seconds=30) state = self.hass.states.get("sensor.test") assert 3.8 == state.attributes.get("min_value") assert 15.2 == state.attributes.get("max_value") # wait for 3 minutes (max_age). 
mock_data["return_time"] += timedelta(minutes=3) fire_time_changed(self.hass, mock_data["return_time"]) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert state.attributes.get("min_value") == STATE_UNKNOWN assert state.attributes.get("max_value") == STATE_UNKNOWN assert state.attributes.get("count") == 0 def test_change_rate(self): """Test min_age/max_age and change_rate.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ): assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value one minute later mock_data["return_time"] += timedelta(minutes=1) state = self.hass.states.get("sensor.test") assert datetime( now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC ) == state.attributes.get("min_age") assert datetime( now.year + 1, 8, 2, 12, 23 + self.count - 1, 42, tzinfo=dt_util.UTC ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) self.hass.block_till_done() # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, } }, ) self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert str(self.mean) == state.state def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" now = dt_util.utcnow() mock_data = { "return_time": datetime(now.year + 1, 8, 2, 12, 23, 42, tzinfo=dt_util.UTC) } def mock_now(): return mock_data["return_time"] # Testing correct retrieval from recorder, thus we do not # want purging to occur within the class itself. def mock_purge(self): return # Set maximum age to 3 hours. max_age = 3 # Determine what our minimum age should be based on test values. 
expected_min_age = mock_data["return_time"] + timedelta( hours=len(self.values) - max_age ) # enable the recorder init_recorder_component(self.hass) self.hass.block_till_done() self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now ), patch.object(StatisticsSensor, "_purge_old", mock_purge): # store some values for value in self.values: self.hass.states.set( "sensor.test_monitored", value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}, ) self.hass.block_till_done() # insert the next value 1 hour later mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( self.hass, "sensor", { "sensor": { "platform": "statistics", "name": "test", "entity_id": "sensor.test_monitored", "sampling_size": 100, "max_age": {"hours": max_age}, } }, ) self.hass.block_till_done() self.hass.block_till_done() self.hass.start() self.hass.block_till_done() # check if the result is as in test_sensor_source() state = self.hass.states.get("sensor.test") assert expected_min_age == state.attributes.get("min_age") # The max_age timestamp should be 1 hour before what we have right # now in mock_data['return_time']. assert mock_data["return_time"] == state.attributes.get("max_age") + timedelta( hours=1 )
pschmitt/home-assistant
tests/components/statistics/test_sensor.py
homeassistant/components/lirc/__init__.py
"""Component to integrate the Home Assistant cloud.""" from hass_nabucasa import Cloud import voluptuous as vol from homeassistant.components.alexa import const as alexa_const from homeassistant.components.google_assistant import const as ga_c from homeassistant.const import ( CONF_DESCRIPTION, CONF_MODE, CONF_NAME, CONF_REGION, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entityfilter from homeassistant.loader import bind_hass from homeassistant.util.aiohttp import MockRequest from . import account_link, http_api from .client import CloudClient from .const import ( CONF_ACCOUNT_LINK_URL, CONF_ACME_DIRECTORY_SERVER, CONF_ALEXA, CONF_ALEXA_ACCESS_TOKEN_URL, CONF_ALIASES, CONF_CLOUDHOOK_CREATE_URL, CONF_COGNITO_CLIENT_ID, CONF_ENTITY_CONFIG, CONF_FILTER, CONF_GOOGLE_ACTIONS, CONF_GOOGLE_ACTIONS_REPORT_STATE_URL, CONF_RELAYER, CONF_REMOTE_API_URL, CONF_SUBSCRIPTION_INFO_URL, CONF_USER_POOL_ID, CONF_VOICE_API_URL, DOMAIN, MODE_DEV, MODE_PROD, ) from .prefs import CloudPreferences DEFAULT_MODE = MODE_PROD SERVICE_REMOTE_CONNECT = "remote_connect" SERVICE_REMOTE_DISCONNECT = "remote_disconnect" ALEXA_ENTITY_SCHEMA = vol.Schema( { vol.Optional(CONF_DESCRIPTION): cv.string, vol.Optional(alexa_const.CONF_DISPLAY_CATEGORIES): cv.string, vol.Optional(CONF_NAME): cv.string, } ) GOOGLE_ENTITY_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_ALIASES): vol.All(cv.ensure_list, [cv.string]), vol.Optional(ga_c.CONF_ROOM_HINT): cv.string, } ) ASSISTANT_SCHEMA = vol.Schema( {vol.Optional(CONF_FILTER, default=dict): entityfilter.FILTER_SCHEMA} ) ALEXA_SCHEMA = ASSISTANT_SCHEMA.extend( {vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: ALEXA_ENTITY_SCHEMA}} ) GACTIONS_SCHEMA = ASSISTANT_SCHEMA.extend( {vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: GOOGLE_ENTITY_SCHEMA}} ) # pylint: disable=no-value-for-parameter CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In( [MODE_DEV, MODE_PROD] ), vol.Optional(CONF_COGNITO_CLIENT_ID): str, vol.Optional(CONF_USER_POOL_ID): str, vol.Optional(CONF_REGION): str, vol.Optional(CONF_RELAYER): str, vol.Optional(CONF_SUBSCRIPTION_INFO_URL): vol.Url(), vol.Optional(CONF_CLOUDHOOK_CREATE_URL): vol.Url(), vol.Optional(CONF_REMOTE_API_URL): vol.Url(), vol.Optional(CONF_ACME_DIRECTORY_SERVER): vol.Url(), vol.Optional(CONF_ALEXA): ALEXA_SCHEMA, vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA, vol.Optional(CONF_ALEXA_ACCESS_TOKEN_URL): vol.Url(), vol.Optional(CONF_GOOGLE_ACTIONS_REPORT_STATE_URL): vol.Url(), vol.Optional(CONF_ACCOUNT_LINK_URL): vol.Url(), vol.Optional(CONF_VOICE_API_URL): vol.Url(), } ) }, extra=vol.ALLOW_EXTRA, ) class CloudNotAvailable(HomeAssistantError): """Raised when an action requires the cloud but it's not available.""" @bind_hass @callback def async_is_logged_in(hass) -> bool: """Test if user is logged in.""" return DOMAIN in hass.data and hass.data[DOMAIN].is_logged_in @bind_hass @callback def async_active_subscription(hass) -> bool: """Test if user has an active subscription.""" return async_is_logged_in(hass) and not hass.data[DOMAIN].subscription_expired @bind_hass async def async_create_cloudhook(hass, webhook_id: str) -> str: """Create a cloudhook.""" if not async_is_logged_in(hass): raise CloudNotAvailable hook = await hass.data[DOMAIN].cloudhooks.async_create(webhook_id, True) return hook["cloudhook_url"] @bind_hass async def 
async_delete_cloudhook(hass, webhook_id: str) -> None: """Delete a cloudhook.""" if DOMAIN not in hass.data: raise CloudNotAvailable await hass.data[DOMAIN].cloudhooks.async_delete(webhook_id) @bind_hass @callback def async_remote_ui_url(hass) -> str: """Get the remote UI URL.""" if not async_is_logged_in(hass): raise CloudNotAvailable if not hass.data[DOMAIN].client.prefs.remote_enabled: raise CloudNotAvailable if not hass.data[DOMAIN].remote.instance_domain: raise CloudNotAvailable return f"https://{hass.data[DOMAIN].remote.instance_domain}" def is_cloudhook_request(request): """Test if a request came from a cloudhook. Async friendly. """ return isinstance(request, MockRequest) async def async_setup(hass, config): """Initialize the Home Assistant cloud.""" # Process configs if DOMAIN in config: kwargs = dict(config[DOMAIN]) else: kwargs = {CONF_MODE: DEFAULT_MODE} # Alexa/Google custom config alexa_conf = kwargs.pop(CONF_ALEXA, None) or ALEXA_SCHEMA({}) google_conf = kwargs.pop(CONF_GOOGLE_ACTIONS, None) or GACTIONS_SCHEMA({}) # Cloud settings prefs = CloudPreferences(hass) await prefs.async_initialize() # Initialize Cloud websession = hass.helpers.aiohttp_client.async_get_clientsession() client = CloudClient(hass, prefs, websession, alexa_conf, google_conf) cloud = hass.data[DOMAIN] = Cloud(client, **kwargs) async def _shutdown(event): """Shutdown event.""" await cloud.stop() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown) async def _service_handler(service): """Handle service for cloud.""" if service.service == SERVICE_REMOTE_CONNECT: await cloud.remote.connect() await prefs.async_update(remote_enabled=True) elif service.service == SERVICE_REMOTE_DISCONNECT: await cloud.remote.disconnect() await prefs.async_update(remote_enabled=False) hass.helpers.service.async_register_admin_service( DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler ) hass.helpers.service.async_register_admin_service( DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler ) loaded = False async def _on_connect(): """Discover RemoteUI binary sensor.""" nonlocal loaded # Prevent multiple discovery if loaded: return loaded = True await hass.helpers.discovery.async_load_platform( "binary_sensor", DOMAIN, {}, config ) await hass.helpers.discovery.async_load_platform("stt", DOMAIN, {}, config) await hass.helpers.discovery.async_load_platform("tts", DOMAIN, {}, config) cloud.iot.register_on_connect(_on_connect) await cloud.start() await http_api.async_setup(hass) account_link.async_setup(hass) return True
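# --- Illustrative sketch (not part of cloud/__init__.py above) -------------
# A hedged example of how another integration might call the cloudhook
# helpers defined above while tolerating an unavailable cloud. The helper
# name _maybe_create_cloudhook is hypothetical.
async def _maybe_create_cloudhook(hass, webhook_id: str):
    """Return a cloudhook URL, or None when the user is not logged in."""
    try:
        return await async_create_cloudhook(hass, webhook_id)
    except CloudNotAvailable:
        return None  # caller falls back to a local webhook URL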
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/cloud/__init__.py
"""Support for hydrological data from the Fed. Office for the Environment.""" from datetime import timedelta import logging from swisshydrodata import SwissHydroData import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by the Swiss Federal Office for the Environment FOEN" ATTR_DELTA_24H = "delta-24h" ATTR_MAX_1H = "max-1h" ATTR_MAX_24H = "max-24h" ATTR_MEAN_1H = "mean-1h" ATTR_MEAN_24H = "mean-24h" ATTR_MIN_1H = "min-1h" ATTR_MIN_24H = "min-24h" ATTR_PREVIOUS_24H = "previous-24h" ATTR_STATION = "station" ATTR_STATION_UPDATE = "station_update" ATTR_WATER_BODY = "water_body" ATTR_WATER_BODY_TYPE = "water_body_type" CONF_STATION = "station" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) SENSOR_DISCHARGE = "discharge" SENSOR_LEVEL = "level" SENSOR_TEMPERATURE = "temperature" CONDITIONS = { SENSOR_DISCHARGE: "mdi:waves", SENSOR_LEVEL: "mdi:zodiac-aquarius", SENSOR_TEMPERATURE: "mdi:oil-temperature", } CONDITION_DETAILS = [ ATTR_DELTA_24H, ATTR_MAX_1H, ATTR_MAX_24H, ATTR_MEAN_1H, ATTR_MEAN_24H, ATTR_MIN_1H, ATTR_MIN_24H, ATTR_PREVIOUS_24H, ] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STATION): vol.Coerce(int), vol.Optional(CONF_MONITORED_CONDITIONS, default=[SENSOR_TEMPERATURE]): vol.All( cv.ensure_list, [vol.In(CONDITIONS)] ), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Swiss hydrological sensor.""" station = config.get(CONF_STATION) monitored_conditions = config.get(CONF_MONITORED_CONDITIONS) hydro_data = HydrologicalData(station) hydro_data.update() if hydro_data.data is None: _LOGGER.error("The station doesn't exists: %s", station) return entities = [] for condition in monitored_conditions: entities.append(SwissHydrologicalDataSensor(hydro_data, station, condition)) add_entities(entities, True) class SwissHydrologicalDataSensor(SensorEntity): """Implementation of a Swiss hydrological sensor.""" def __init__(self, hydro_data, station, condition): """Initialize the Swiss hydrological sensor.""" self.hydro_data = hydro_data self._condition = condition self._data = self._state = self._unit_of_measurement = None self._icon = CONDITIONS[condition] self._station = station @property def name(self): """Return the name of the sensor.""" return "{} {}".format(self._data["water-body-name"], self._condition) @property def unique_id(self) -> str: """Return a unique, friendly identifier for this entity.""" return f"{self._station}_{self._condition}" @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" if self._state is not None: return self.hydro_data.data["parameters"][self._condition]["unit"] return None @property def state(self): """Return the state of the sensor.""" if isinstance(self._state, (int, float)): return round(self._state, 2) return None @property def extra_state_attributes(self): """Return the device state attributes.""" attrs = {} if not self._data: attrs[ATTR_ATTRIBUTION] = ATTRIBUTION return attrs attrs[ATTR_WATER_BODY_TYPE] = self._data["water-body-type"] attrs[ATTR_STATION] = self._data["name"] attrs[ATTR_STATION_UPDATE] = self._data["parameters"][self._condition][ "datetime" ] attrs[ATTR_ATTRIBUTION] = ATTRIBUTION for entry in CONDITION_DETAILS: attrs[entry.replace("-", "_")] = 
self._data["parameters"][self._condition][ entry ] return attrs @property def icon(self): """Icon to use in the frontend.""" return self._icon def update(self): """Get the latest data and update the state.""" self.hydro_data.update() self._data = self.hydro_data.data if self._data is None: self._state = None else: self._state = self._data["parameters"][self._condition]["value"] class HydrologicalData: """The Class for handling the data retrieval.""" def __init__(self, station): """Initialize the data object.""" self.station = station self.data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data.""" shd = SwissHydroData() self.data = shd.get_station(self.station)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/swiss_hydrological_data/sensor.py
"""Support for MQTT vacuums.""" import functools import voluptuous as vol from homeassistant.components.vacuum import DOMAIN from homeassistant.helpers.reload import async_setup_reload_service from .. import DOMAIN as MQTT_DOMAIN, PLATFORMS from ..mixins import async_setup_entry_helper from .schema import CONF_SCHEMA, LEGACY, MQTT_VACUUM_SCHEMA, STATE from .schema_legacy import PLATFORM_SCHEMA_LEGACY, async_setup_entity_legacy from .schema_state import PLATFORM_SCHEMA_STATE, async_setup_entity_state def validate_mqtt_vacuum(value): """Validate MQTT vacuum schema.""" schemas = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE} return schemas[value[CONF_SCHEMA]](value) PLATFORM_SCHEMA = vol.All( MQTT_VACUUM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), validate_mqtt_vacuum ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up MQTT vacuum through configuration.yaml.""" await async_setup_reload_service(hass, MQTT_DOMAIN, PLATFORMS) await _async_setup_entity(async_add_entities, config) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up MQTT vacuum dynamically through MQTT discovery.""" setup = functools.partial( _async_setup_entity, async_add_entities, config_entry=config_entry ) await async_setup_entry_helper(hass, DOMAIN, setup, PLATFORM_SCHEMA) async def _async_setup_entity( async_add_entities, config, config_entry=None, discovery_data=None ): """Set up the MQTT vacuum.""" setup_entity = {LEGACY: async_setup_entity_legacy, STATE: async_setup_entity_state} await setup_entity[config[CONF_SCHEMA]]( config, async_add_entities, config_entry, discovery_data )
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/mqtt/vacuum/__init__.py
"""Models for permissions.""" from __future__ import annotations from typing import TYPE_CHECKING import attr if TYPE_CHECKING: from homeassistant.helpers import ( device_registry as dev_reg, entity_registry as ent_reg, ) @attr.s(slots=True) class PermissionLookup: """Class to hold data for permission lookups.""" entity_registry: ent_reg.EntityRegistry = attr.ib() device_registry: dev_reg.DeviceRegistry = attr.ib()
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/auth/permissions/models.py
"""Reproduce an Input select state.""" from __future__ import annotations import asyncio import logging from types import MappingProxyType from typing import Any, Iterable from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import Context, HomeAssistant, State from . import ( ATTR_OPTION, ATTR_OPTIONS, DOMAIN, SERVICE_SELECT_OPTION, SERVICE_SET_OPTIONS, ) ATTR_GROUP = [ATTR_OPTION, ATTR_OPTIONS] _LOGGER = logging.getLogger(__name__) async def _async_reproduce_state( hass: HomeAssistant, state: State, *, context: Context | None = None, reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) # Return if we can't find entity if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return # Return if we are already at the right state. if cur_state.state == state.state and all( check_attr_equal(cur_state.attributes, state.attributes, attr) for attr in ATTR_GROUP ): return # Set service data service_data = {ATTR_ENTITY_ID: state.entity_id} # If options are specified, call SERVICE_SET_OPTIONS if ATTR_OPTIONS in state.attributes: service = SERVICE_SET_OPTIONS service_data[ATTR_OPTIONS] = state.attributes[ATTR_OPTIONS] await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) # Remove ATTR_OPTIONS from service_data so we can reuse service_data in next call del service_data[ATTR_OPTIONS] # Call SERVICE_SELECT_OPTION service = SERVICE_SELECT_OPTION service_data[ATTR_OPTION] = state.state await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistant, states: Iterable[State], *, context: Context | None = None, reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce Input select states.""" # Reproduce states in parallel. await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) ) def check_attr_equal( attr1: MappingProxyType, attr2: MappingProxyType, attr_str: str ) -> bool: """Return true if the given attributes are equal.""" return attr1.get(attr_str) == attr2.get(attr_str)
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/input_select/reproduce_state.py
"""Support for OpenTherm Gateway devices.""" import asyncio from datetime import date, datetime import logging import pyotgw import pyotgw.vars as gw_vars import voluptuous as vol from homeassistant.components.binary_sensor import DOMAIN as COMP_BINARY_SENSOR from homeassistant.components.climate import DOMAIN as COMP_CLIMATE from homeassistant.components.sensor import DOMAIN as COMP_SENSOR from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( ATTR_DATE, ATTR_ID, ATTR_MODE, ATTR_TEMPERATURE, ATTR_TIME, CONF_DEVICE, CONF_ID, CONF_NAME, EVENT_HOMEASSISTANT_STOP, PRECISION_HALVES, PRECISION_TENTHS, PRECISION_WHOLE, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import ( async_get_registry as async_get_dev_reg, ) from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( ATTR_CH_OVRD, ATTR_DHW_OVRD, ATTR_GW_ID, ATTR_LEVEL, CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, CONF_READ_PRECISION, CONF_SET_PRECISION, DATA_GATEWAYS, DATA_OPENTHERM_GW, DOMAIN, SERVICE_RESET_GATEWAY, SERVICE_SET_CH_OVRD, SERVICE_SET_CLOCK, SERVICE_SET_CONTROL_SETPOINT, SERVICE_SET_GPIO_MODE, SERVICE_SET_HOT_WATER_OVRD, SERVICE_SET_HOT_WATER_SETPOINT, SERVICE_SET_LED_MODE, SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, SERVICE_SET_SB_TEMP, ) _LOGGER = logging.getLogger(__name__) CLIMATE_SCHEMA = vol.Schema( { vol.Optional(CONF_PRECISION): vol.In( [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] ), vol.Optional(CONF_FLOOR_TEMP, default=False): cv.boolean, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( { vol.Required(CONF_DEVICE): cv.string, vol.Optional(CONF_CLIMATE, default={}): CLIMATE_SCHEMA, vol.Optional(CONF_NAME): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def options_updated(hass, entry): """Handle options update.""" gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] async_dispatcher_send(hass, gateway.options_update_signal, entry) async def async_setup_entry(hass, config_entry): """Set up the OpenTherm Gateway component.""" if DATA_OPENTHERM_GW not in hass.data: hass.data[DATA_OPENTHERM_GW] = {DATA_GATEWAYS: {}} gateway = OpenThermGatewayDevice(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway if config_entry.options.get(CONF_PRECISION): migrate_options = dict(config_entry.options) migrate_options.update( { CONF_READ_PRECISION: config_entry.options[CONF_PRECISION], CONF_SET_PRECISION: config_entry.options[CONF_PRECISION], } ) del migrate_options[CONF_PRECISION] hass.config_entries.async_update_entry(config_entry, options=migrate_options) config_entry.add_update_listener(options_updated) # Schedule directly on the loop to avoid blocking HA startup. 
    hass.loop.create_task(gateway.connect_and_subscribe())

    for comp in [COMP_BINARY_SENSOR, COMP_CLIMATE, COMP_SENSOR]:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, comp)
        )

    register_services(hass)
    return True


async def async_setup(hass, config):
    """Set up the OpenTherm Gateway component."""
    if not hass.config_entries.async_entries(DOMAIN) and DOMAIN in config:
        conf = config[DOMAIN]
        for device_id, device_config in conf.items():
            device_config[CONF_ID] = device_id
            hass.async_create_task(
                hass.config_entries.flow.async_init(
                    DOMAIN, context={"source": SOURCE_IMPORT}, data=device_config
                )
            )
    return True


def register_services(hass):
    """Register services for the component."""
    service_reset_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            )
        }
    )
    service_set_central_heating_ovrd_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_CH_OVRD): cv.boolean,
        }
    )
    service_set_clock_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Optional(ATTR_DATE, default=date.today()): cv.date,
            vol.Optional(ATTR_TIME, default=datetime.now().time()): cv.time,
        }
    )
    service_set_control_setpoint_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_TEMPERATURE): vol.All(
                vol.Coerce(float), vol.Range(min=0, max=90)
            ),
        }
    )
    service_set_hot_water_setpoint_schema = service_set_control_setpoint_schema
    service_set_hot_water_ovrd_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_DHW_OVRD): vol.Any(
                vol.Equal("A"), vol.All(vol.Coerce(int), vol.Range(min=0, max=1))
            ),
        }
    )
    service_set_gpio_mode_schema = vol.Schema(
        vol.Any(
            vol.Schema(
                {
                    vol.Required(ATTR_GW_ID): vol.All(
                        cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
                    ),
                    vol.Required(ATTR_ID): vol.Equal("A"),
                    vol.Required(ATTR_MODE): vol.All(
                        vol.Coerce(int), vol.Range(min=0, max=6)
                    ),
                }
            ),
            vol.Schema(
                {
                    vol.Required(ATTR_GW_ID): vol.All(
                        cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
                    ),
                    vol.Required(ATTR_ID): vol.Equal("B"),
                    vol.Required(ATTR_MODE): vol.All(
                        vol.Coerce(int), vol.Range(min=0, max=7)
                    ),
                }
            ),
        )
    )
    service_set_led_mode_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_ID): vol.In("ABCDEF"),
            vol.Required(ATTR_MODE): vol.In("RXTBOFHWCEMP"),
        }
    )
    service_set_max_mod_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_LEVEL): vol.All(
                vol.Coerce(int), vol.Range(min=-1, max=100)
            ),
        }
    )
    service_set_oat_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_TEMPERATURE): vol.All(
                vol.Coerce(float), vol.Range(min=-40, max=99)
            ),
        }
    )
    service_set_sb_temp_schema = vol.Schema(
        {
            vol.Required(ATTR_GW_ID): vol.All(
                cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
            ),
            vol.Required(ATTR_TEMPERATURE): vol.All(
                vol.Coerce(float), vol.Range(min=0, max=30)
            ),
        }
    )

    async def reset_gateway(call):
        """Reset the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        mode_rst = gw_vars.OTGW_MODE_RESET
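        # As used here, pyotgw's set_mode(OTGW_MODE_RESET) reboots the gateway
        # and resolves to the refreshed status dict once it comes back up.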
        status = await gw_dev.gateway.set_mode(mode_rst)
        gw_dev.status = status
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN, SERVICE_RESET_GATEWAY, reset_gateway, service_reset_schema
    )

    async def set_ch_ovrd(call):
        """Set the central heating override on the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        await gw_dev.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_CH_OVRD,
        set_ch_ovrd,
        service_set_central_heating_ovrd_schema,
    )

    async def set_control_setpoint(call):
        """Set the control setpoint on the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gw_var = gw_vars.DATA_CONTROL_SETPOINT
        value = await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE])
        gw_dev.status.update({gw_var: value})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_CONTROL_SETPOINT,
        set_control_setpoint,
        service_set_control_setpoint_schema,
    )

    async def set_dhw_ovrd(call):
        """Set the domestic hot water override on the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gw_var = gw_vars.OTGW_DHW_OVRD
        value = await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD])
        gw_dev.status.update({gw_var: value})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_HOT_WATER_OVRD,
        set_dhw_ovrd,
        service_set_hot_water_ovrd_schema,
    )

    async def set_dhw_setpoint(call):
        """Set the domestic hot water setpoint on the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gw_var = gw_vars.DATA_DHW_SETPOINT
        value = await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE])
        gw_dev.status.update({gw_var: value})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_HOT_WATER_SETPOINT,
        set_dhw_setpoint,
        service_set_hot_water_setpoint_schema,
    )

    async def set_device_clock(call):
        """Set the clock on the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        attr_date = call.data[ATTR_DATE]
        attr_time = call.data[ATTR_TIME]
        await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time))

    hass.services.async_register(
        DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema
    )

    async def set_gpio_mode(call):
        """Set the OpenTherm Gateway GPIO modes."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gpio_id = call.data[ATTR_ID]
        gpio_mode = call.data[ATTR_MODE]
        mode = await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode)
        gpio_var = getattr(gw_vars, f"OTGW_GPIO_{gpio_id}")
        gw_dev.status.update({gpio_var: mode})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema
    )

    async def set_led_mode(call):
        """Set the OpenTherm Gateway LED modes."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        led_id = call.data[ATTR_ID]
        led_mode = call.data[ATTR_MODE]
        mode = await gw_dev.gateway.set_led_mode(led_id, led_mode)
        led_var = getattr(gw_vars, f"OTGW_LED_{led_id}")
        gw_dev.status.update({led_var: mode})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_LED_MODE,
        set_led_mode,
        service_set_led_mode_schema,
    )

    async def set_max_mod(call):
        """Set the max modulation level."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gw_var = gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD
        level = call.data[ATTR_LEVEL]
        if level == -1:
            # Backend only clears setting on non-numeric values.
            level = "-"
        value = await gw_dev.gateway.set_max_relative_mod(level)
        gw_dev.status.update({gw_var: value})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema
    )

    async def set_outside_temp(call):
        """Provide the outside temperature to the OpenTherm Gateway."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gw_var = gw_vars.DATA_OUTSIDE_TEMP
        value = await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE])
        gw_dev.status.update({gw_var: value})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema
    )

    async def set_setback_temp(call):
        """Set the OpenTherm Gateway SetBack temperature."""
        gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
        gw_var = gw_vars.OTGW_SB_TEMP
        value = await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE])
        gw_dev.status.update({gw_var: value})
        async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema
    )


async def async_unload_entry(hass, entry):
    """Cleanup and disconnect from gateway."""
    await asyncio.gather(
        hass.config_entries.async_forward_entry_unload(entry, COMP_BINARY_SENSOR),
        hass.config_entries.async_forward_entry_unload(entry, COMP_CLIMATE),
        hass.config_entries.async_forward_entry_unload(entry, COMP_SENSOR),
    )
    gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]]
    await gateway.cleanup()
    return True


class OpenThermGatewayDevice:
    """OpenTherm Gateway device class."""

    def __init__(self, hass, config_entry):
        """Initialize the OpenTherm Gateway."""
        self.hass = hass
        self.device_path = config_entry.data[CONF_DEVICE]
        self.gw_id = config_entry.data[CONF_ID]
        self.name = config_entry.data[CONF_NAME]
        self.climate_config = config_entry.options
        self.config_entry_id = config_entry.entry_id
        self.status = {}
        self.update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_update"
        self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_options_update"
        self.gateway = pyotgw.pyotgw()
        self.gw_version = None

    async def cleanup(self, event=None):
        """Reset overrides on the gateway."""
        await self.gateway.set_control_setpoint(0)
        await self.gateway.set_max_relative_mod("-")
        await self.gateway.disconnect()

    async def connect_and_subscribe(self):
        """Connect to serial device and subscribe report handler."""
        self.status = await self.gateway.connect(self.hass.loop, self.device_path)
        version_string = self.status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT)
        self.gw_version = version_string[18:] if version_string else None
        _LOGGER.debug(
            "Connected to OpenTherm Gateway %s at %s", self.gw_version, self.device_path
        )
        dev_reg = await async_get_dev_reg(self.hass)
        gw_dev = dev_reg.async_get_or_create(
            config_entry_id=self.config_entry_id,
            identifiers={(DOMAIN, self.gw_id)},
            name=self.name,
            manufacturer="Schelte Bron",
            model="OpenTherm Gateway",
            sw_version=self.gw_version,
        )
        if gw_dev.sw_version != self.gw_version:
            dev_reg.async_update_device(gw_dev.id, sw_version=self.gw_version)

        self.hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.cleanup)

        async def handle_report(status):
            """Handle reports from the OpenTherm Gateway."""
            _LOGGER.debug("Received report: %s", status)
            self.status = status
            async_dispatcher_send(self.hass, self.update_signal, status)

        self.gateway.subscribe(handle_report)
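# A quick standalone sketch of the vol.All(vol.Coerce(float), vol.Range(...))
# chain the service schemas above rely on: coerce the input first, then
# bounds-check the coerced value.
import voluptuous as vol

temperature = vol.All(vol.Coerce(float), vol.Range(min=0, max=90))

print(temperature("21.5"))  # -> 21.5 (string coerced to float)
try:
    temperature(120)
except vol.Invalid as err:
    print(f"rejected: {err}")  # out of range: above the max of 90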
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
repo_name: w1ll1am23/home-assistant
test_path: tests/components/plaato/test_config_flow.py
code_path: homeassistant/components/opentherm_gw/__init__.py
"""Constants for the AVM Fritz!Box integration.""" import logging ATTR_STATE_BATTERY_LOW = "battery_low" ATTR_STATE_DEVICE_LOCKED = "device_locked" ATTR_STATE_HOLIDAY_MODE = "holiday_mode" ATTR_STATE_LOCKED = "locked" ATTR_STATE_SUMMER_MODE = "summer_mode" ATTR_STATE_WINDOW_OPEN = "window_open" ATTR_TEMPERATURE_UNIT = "temperature_unit" ATTR_TOTAL_CONSUMPTION = "total_consumption" ATTR_TOTAL_CONSUMPTION_UNIT = "total_consumption_unit" CONF_CONNECTIONS = "connections" DEFAULT_HOST = "fritz.box" DEFAULT_USERNAME = "admin" DOMAIN = "fritzbox" LOGGER = logging.getLogger(__package__) PLATFORMS = ["binary_sensor", "climate", "switch", "sensor"]
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
repo_name: w1ll1am23/home-assistant
test_path: tests/components/plaato/test_config_flow.py
code_path: homeassistant/components/fritzbox/const.py
"""Support for ESPHome lights.""" from __future__ import annotations from aioesphomeapi import LightInfo, LightState from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_WHITE_VALUE, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType import homeassistant.util.color as color_util from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry FLASH_LENGTHS = {FLASH_SHORT: 2, FLASH_LONG: 10} async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up ESPHome lights based on a config entry.""" await platform_async_setup_entry( hass, entry, async_add_entities, component_key="light", info_type=LightInfo, entity_type=EsphomeLight, state_type=LightState, ) class EsphomeLight(EsphomeEntity, LightEntity): """A switch implementation for ESPHome.""" @property def _static_info(self) -> LightInfo: return super()._static_info @property def _state(self) -> LightState | None: return super()._state # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property # pylint: disable=invalid-overridden-method @esphome_state_property def is_on(self) -> bool | None: """Return true if the switch is on.""" return self._state.state async def async_turn_on(self, **kwargs) -> None: """Turn the entity on.""" data = {"key": self._static_info.key, "state": True} if ATTR_HS_COLOR in kwargs: hue, sat = kwargs[ATTR_HS_COLOR] red, green, blue = color_util.color_hsv_to_RGB(hue, sat, 100) data["rgb"] = (red / 255, green / 255, blue / 255) if ATTR_FLASH in kwargs: data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]] if ATTR_TRANSITION in kwargs: data["transition_length"] = kwargs[ATTR_TRANSITION] if ATTR_BRIGHTNESS in kwargs: data["brightness"] = kwargs[ATTR_BRIGHTNESS] / 255 if ATTR_COLOR_TEMP in kwargs: data["color_temperature"] = kwargs[ATTR_COLOR_TEMP] if ATTR_EFFECT in kwargs: data["effect"] = kwargs[ATTR_EFFECT] if ATTR_WHITE_VALUE in kwargs: data["white"] = kwargs[ATTR_WHITE_VALUE] / 255 await self._client.light_command(**data) async def async_turn_off(self, **kwargs) -> None: """Turn the entity off.""" data = {"key": self._static_info.key, "state": False} if ATTR_FLASH in kwargs: data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]] if ATTR_TRANSITION in kwargs: data["transition_length"] = kwargs[ATTR_TRANSITION] await self._client.light_command(**data) @esphome_state_property def brightness(self) -> int | None: """Return the brightness of this light between 0..255.""" return round(self._state.brightness * 255) @esphome_state_property def hs_color(self) -> tuple[float, float] | None: """Return the hue and saturation color value [float, float].""" return color_util.color_RGB_to_hs( self._state.red * 255, self._state.green * 255, self._state.blue * 255 ) @esphome_state_property def color_temp(self) -> float | None: """Return the CT color value in mireds.""" return self._state.color_temperature @esphome_state_property def white_value(self) -> int | None: """Return the white value of this light between 0..255.""" return round(self._state.white * 255) @esphome_state_property def effect(self) -> str | None: """Return the current effect.""" return self._state.effect @property def supported_features(self) -> int: """Flag 
supported features.""" flags = SUPPORT_FLASH if self._static_info.supports_brightness: flags |= SUPPORT_BRIGHTNESS flags |= SUPPORT_TRANSITION if self._static_info.supports_rgb: flags |= SUPPORT_COLOR if self._static_info.supports_white_value: flags |= SUPPORT_WHITE_VALUE if self._static_info.supports_color_temperature: flags |= SUPPORT_COLOR_TEMP if self._static_info.effects: flags |= SUPPORT_EFFECT return flags @property def effect_list(self) -> list[str]: """Return the list of supported effects.""" return self._static_info.effects @property def min_mireds(self) -> float: """Return the coldest color_temp that this light supports.""" return self._static_info.min_mireds @property def max_mireds(self) -> float: """Return the warmest color_temp that this light supports.""" return self._static_info.max_mireds
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
repo_name: w1ll1am23/home-assistant
test_path: tests/components/plaato/test_config_flow.py
code_path: homeassistant/components/esphome/light.py
"""This platform provides support for sensor data from RainMachine.""" from functools import partial from typing import Callable from regenmaschine.controller import Controller from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import TEMP_CELSIUS, VOLUME_CUBIC_METERS from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import RainMachineEntity from .const import ( DATA_CONTROLLER, DATA_COORDINATOR, DATA_PROVISION_SETTINGS, DATA_RESTRICTIONS_UNIVERSAL, DOMAIN, ) TYPE_FLOW_SENSOR_CLICK_M3 = "flow_sensor_clicks_cubic_meter" TYPE_FLOW_SENSOR_CONSUMED_LITERS = "flow_sensor_consumed_liters" TYPE_FLOW_SENSOR_START_INDEX = "flow_sensor_start_index" TYPE_FLOW_SENSOR_WATERING_CLICKS = "flow_sensor_watering_clicks" TYPE_FREEZE_TEMP = "freeze_protect_temp" SENSORS = { TYPE_FLOW_SENSOR_CLICK_M3: ( "Flow Sensor Clicks", "mdi:water-pump", f"clicks/{VOLUME_CUBIC_METERS}", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FLOW_SENSOR_CONSUMED_LITERS: ( "Flow Sensor Consumed Liters", "mdi:water-pump", "liter", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FLOW_SENSOR_START_INDEX: ( "Flow Sensor Start Index", "mdi:water-pump", "index", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FLOW_SENSOR_WATERING_CLICKS: ( "Flow Sensor Clicks", "mdi:water-pump", "clicks", None, False, DATA_PROVISION_SETTINGS, ), TYPE_FREEZE_TEMP: ( "Freeze Protect Temperature", "mdi:thermometer", TEMP_CELSIUS, "temperature", True, DATA_RESTRICTIONS_UNIVERSAL, ), } async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable ) -> None: """Set up RainMachine sensors based on a config entry.""" controller = hass.data[DOMAIN][DATA_CONTROLLER][entry.entry_id] coordinators = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id] @callback def async_get_sensor(api_category: str) -> partial: """Generate the appropriate sensor object for an API category.""" if api_category == DATA_PROVISION_SETTINGS: return partial( ProvisionSettingsSensor, coordinators[DATA_PROVISION_SETTINGS], ) return partial( UniversalRestrictionsSensor, coordinators[DATA_RESTRICTIONS_UNIVERSAL], ) async_add_entities( [ async_get_sensor(api_category)( controller, sensor_type, name, icon, unit, device_class, enabled_by_default, ) for ( sensor_type, (name, icon, unit, device_class, enabled_by_default, api_category), ) in SENSORS.items() ] ) class RainMachineSensor(RainMachineEntity, SensorEntity): """Define a general RainMachine sensor.""" def __init__( self, coordinator: DataUpdateCoordinator, controller: Controller, sensor_type: str, name: str, icon: str, unit: str, device_class: str, enabled_by_default: bool, ) -> None: """Initialize.""" super().__init__(coordinator, controller) self._device_class = device_class self._enabled_by_default = enabled_by_default self._icon = icon self._name = name self._sensor_type = sensor_type self._state = None self._unit = unit @property def entity_registry_enabled_default(self) -> bool: """Determine whether an entity is enabled by default.""" return self._enabled_by_default @property def icon(self) -> str: """Return the icon.""" return self._icon @property def state(self) -> str: """Return the name of the entity.""" return self._state @property def unique_id(self) -> str: """Return a unique, Home Assistant friendly identifier for this entity.""" return f"{self._unique_id}_{self._sensor_type}" @property def unit_of_measurement(self) -> str: 
"""Return the unit the value is expressed in.""" return self._unit class ProvisionSettingsSensor(RainMachineSensor): """Define a sensor that handles provisioning data.""" @callback def update_from_latest_data(self) -> None: """Update the state.""" if self._sensor_type == TYPE_FLOW_SENSOR_CLICK_M3: self._state = self.coordinator.data["system"].get( "flowSensorClicksPerCubicMeter" ) elif self._sensor_type == TYPE_FLOW_SENSOR_CONSUMED_LITERS: clicks = self.coordinator.data["system"].get("flowSensorWateringClicks") clicks_per_m3 = self.coordinator.data["system"].get( "flowSensorClicksPerCubicMeter" ) if clicks and clicks_per_m3: self._state = (clicks * 1000) / clicks_per_m3 else: self._state = None elif self._sensor_type == TYPE_FLOW_SENSOR_START_INDEX: self._state = self.coordinator.data["system"].get("flowSensorStartIndex") elif self._sensor_type == TYPE_FLOW_SENSOR_WATERING_CLICKS: self._state = self.coordinator.data["system"].get( "flowSensorWateringClicks" ) class UniversalRestrictionsSensor(RainMachineSensor): """Define a sensor that handles universal restrictions data.""" @callback def update_from_latest_data(self) -> None: """Update the state.""" if self._sensor_type == TYPE_FREEZE_TEMP: self._state = self.coordinator.data["freezeProtectTemp"]
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
repo_name: w1ll1am23/home-assistant
test_path: tests/components/plaato/test_config_flow.py
code_path: homeassistant/components/rainmachine/sensor.py
"""Define a config flow manager for AirVisual.""" import asyncio from pyairvisual import CloudAPI, NodeSamba from pyairvisual.errors import ( AirVisualError, InvalidKeyError, NodeProError, NotFoundError, ) import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_API_KEY, CONF_IP_ADDRESS, CONF_LATITUDE, CONF_LONGITUDE, CONF_PASSWORD, CONF_SHOW_ON_MAP, CONF_STATE, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from . import async_get_geography_id from .const import ( CONF_CITY, CONF_COUNTRY, CONF_INTEGRATION_TYPE, DOMAIN, INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, LOGGER, ) API_KEY_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): cv.string}) GEOGRAPHY_NAME_SCHEMA = API_KEY_DATA_SCHEMA.extend( { vol.Required(CONF_CITY): cv.string, vol.Required(CONF_STATE): cv.string, vol.Required(CONF_COUNTRY): cv.string, } ) NODE_PRO_SCHEMA = vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): cv.string} ) PICK_INTEGRATION_TYPE_SCHEMA = vol.Schema( { vol.Required("type"): vol.In( [ INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, ] ) } ) class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle an AirVisual config flow.""" VERSION = 2 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL def __init__(self): """Initialize the config flow.""" self._entry_data_for_reauth = None self._geo_id = None @property def geography_coords_schema(self): """Return the data schema for the cloud API.""" return API_KEY_DATA_SCHEMA.extend( { vol.Required( CONF_LATITUDE, default=self.hass.config.latitude ): cv.latitude, vol.Required( CONF_LONGITUDE, default=self.hass.config.longitude ): cv.longitude, } ) async def _async_finish_geography(self, user_input, integration_type): """Validate a Cloud API key.""" websession = aiohttp_client.async_get_clientsession(self.hass) cloud_api = CloudAPI(user_input[CONF_API_KEY], session=websession) # If this is the first (and only the first) time we've seen this API key, check # that it's valid: valid_keys = self.hass.data.setdefault("airvisual_checked_api_keys", set()) valid_keys_lock = self.hass.data.setdefault( "airvisual_checked_api_keys_lock", asyncio.Lock() ) if integration_type == INTEGRATION_TYPE_GEOGRAPHY_COORDS: coro = cloud_api.air_quality.nearest_city() error_schema = self.geography_coords_schema error_step = "geography_by_coords" else: coro = cloud_api.air_quality.city( user_input[CONF_CITY], user_input[CONF_STATE], user_input[CONF_COUNTRY] ) error_schema = GEOGRAPHY_NAME_SCHEMA error_step = "geography_by_name" async with valid_keys_lock: if user_input[CONF_API_KEY] not in valid_keys: try: await coro except InvalidKeyError: return self.async_show_form( step_id=error_step, data_schema=error_schema, errors={CONF_API_KEY: "invalid_api_key"}, ) except NotFoundError: return self.async_show_form( step_id=error_step, data_schema=error_schema, errors={CONF_CITY: "location_not_found"}, ) except AirVisualError as err: LOGGER.error(err) return self.async_show_form( step_id=error_step, data_schema=error_schema, errors={"base": "unknown"}, ) valid_keys.add(user_input[CONF_API_KEY]) existing_entry = await self.async_set_unique_id(self._geo_id) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=user_input) return self.async_abort(reason="reauth_successful") return self.async_create_entry( 
title=f"Cloud API ({self._geo_id})", data={**user_input, CONF_INTEGRATION_TYPE: integration_type}, ) async def _async_init_geography(self, user_input, integration_type): """Handle the initialization of the integration via the cloud API.""" self._geo_id = async_get_geography_id(user_input) await self._async_set_unique_id(self._geo_id) self._abort_if_unique_id_configured() return await self._async_finish_geography(user_input, integration_type) async def _async_set_unique_id(self, unique_id): """Set the unique ID of the config flow and abort if it already exists.""" await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() @staticmethod @callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return AirVisualOptionsFlowHandler(config_entry) async def async_step_geography_by_coords(self, user_input=None): """Handle the initialization of the cloud API based on latitude/longitude.""" if not user_input: return self.async_show_form( step_id="geography_by_coords", data_schema=self.geography_coords_schema ) return await self._async_init_geography( user_input, INTEGRATION_TYPE_GEOGRAPHY_COORDS ) async def async_step_geography_by_name(self, user_input=None): """Handle the initialization of the cloud API based on city/state/country.""" if not user_input: return self.async_show_form( step_id="geography_by_name", data_schema=GEOGRAPHY_NAME_SCHEMA ) return await self._async_init_geography( user_input, INTEGRATION_TYPE_GEOGRAPHY_NAME ) async def async_step_node_pro(self, user_input=None): """Handle the initialization of the integration with a Node/Pro.""" if not user_input: return self.async_show_form(step_id="node_pro", data_schema=NODE_PRO_SCHEMA) await self._async_set_unique_id(user_input[CONF_IP_ADDRESS]) node = NodeSamba(user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD]) try: await node.async_connect() except NodeProError as err: LOGGER.error("Error connecting to Node/Pro unit: %s", err) return self.async_show_form( step_id="node_pro", data_schema=NODE_PRO_SCHEMA, errors={CONF_IP_ADDRESS: "cannot_connect"}, ) await node.async_disconnect() return self.async_create_entry( title=f"Node/Pro ({user_input[CONF_IP_ADDRESS]})", data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO}, ) async def async_step_reauth(self, data): """Handle configuration by re-auth.""" self._entry_data_for_reauth = data self._geo_id = async_get_geography_id(data) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm(self, user_input=None): """Handle re-auth completion.""" if not user_input: return self.async_show_form( step_id="reauth_confirm", data_schema=API_KEY_DATA_SCHEMA ) conf = {CONF_API_KEY: user_input[CONF_API_KEY], **self._entry_data_for_reauth} return await self._async_finish_geography( conf, self._entry_data_for_reauth[CONF_INTEGRATION_TYPE] ) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return self.async_show_form( step_id="user", data_schema=PICK_INTEGRATION_TYPE_SCHEMA ) if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY_COORDS: return await self.async_step_geography_by_coords() if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY_NAME: return await self.async_step_geography_by_name() return await self.async_step_node_pro() class AirVisualOptionsFlowHandler(config_entries.OptionsFlow): """Handle an AirVisual options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry async def async_step_init(self, 
user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required( CONF_SHOW_ON_MAP, default=self.config_entry.options.get(CONF_SHOW_ON_MAP), ): bool } ), )
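# A standalone sketch of the "validate each API key only once" pattern used in
# _async_finish_geography above: a shared set remembers keys that already passed
# validation, and an asyncio.Lock keeps two concurrent flows from validating the
# same key twice. The names here are illustrative, not Home Assistant APIs.
import asyncio

_checked_keys = set()
_checked_keys_lock = asyncio.Lock()


async def validate_once(api_key, validate):
    """Run the (possibly expensive) validate coroutine only for unseen keys."""
    async with _checked_keys_lock:
        if api_key not in _checked_keys:
            await validate()  # expected to raise on an invalid key
            _checked_keys.add(api_key)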
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
repo_name: w1ll1am23/home-assistant
test_path: tests/components/plaato/test_config_flow.py
code_path: homeassistant/components/airvisual/config_flow.py
"""Brother helpers functions.""" import logging import pysnmp.hlapi.asyncio as hlapi from pysnmp.hlapi.asyncio.cmdgen import lcd from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import callback from homeassistant.helpers import singleton from .const import DOMAIN, SNMP _LOGGER = logging.getLogger(__name__) @singleton.singleton("snmp_engine") def get_snmp_engine(hass): """Get SNMP engine.""" _LOGGER.debug("Creating SNMP engine") snmp_engine = hlapi.SnmpEngine() @callback def shutdown_listener(ev): if hass.data.get(DOMAIN): _LOGGER.debug("Unconfiguring SNMP engine") lcd.unconfigure(hass.data[DOMAIN][SNMP], None) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_listener) return snmp_engine
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/brother/utils.py
"""The DirecTV integration.""" from __future__ import annotations import asyncio from datetime import timedelta from typing import Any from directv import DIRECTV, DIRECTVError from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_NAME, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import Entity from .const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, ATTR_SOFTWARE_VERSION, ATTR_VIA_DEVICE, DOMAIN, ) CONFIG_SCHEMA = cv.deprecated(DOMAIN) PLATFORMS = ["media_player", "remote"] SCAN_INTERVAL = timedelta(seconds=30) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up DirecTV from a config entry.""" dtv = DIRECTV(entry.data[CONF_HOST], session=async_get_clientsession(hass)) try: await dtv.update() except DIRECTVError as err: raise ConfigEntryNotReady from err hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = dtv for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class DIRECTVEntity(Entity): """Defines a base DirecTV entity.""" def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None: """Initialize the DirecTV entity.""" self._address = address self._device_id = address if address != "0" else dtv.device.info.receiver_id self._is_client = address != "0" self._name = name self.dtv = dtv @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def device_info(self) -> dict[str, Any]: """Return device information about this DirecTV receiver.""" return { ATTR_IDENTIFIERS: {(DOMAIN, self._device_id)}, ATTR_NAME: self.name, ATTR_MANUFACTURER: self.dtv.device.info.brand, ATTR_MODEL: None, ATTR_SOFTWARE_VERSION: self.dtv.device.info.version, ATTR_VIA_DEVICE: (DOMAIN, self.dtv.device.info.receiver_id), }
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/directv/__init__.py
"""Support for interface with a Gree climate systems.""" from __future__ import annotations from homeassistant.components.switch import DEVICE_CLASS_SWITCH, SwitchEntity from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import COORDINATOR, DOMAIN async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Gree HVAC device from a config entry.""" async_add_entities( [ GreeSwitchEntity(coordinator) for coordinator in hass.data[DOMAIN][COORDINATOR] ] ) class GreeSwitchEntity(CoordinatorEntity, SwitchEntity): """Representation of a Gree HVAC device.""" def __init__(self, coordinator): """Initialize the Gree device.""" super().__init__(coordinator) self._name = coordinator.device.device_info.name + " Panel Light" self._mac = coordinator.device.device_info.mac @property def name(self) -> str: """Return the name of the device.""" return self._name @property def unique_id(self) -> str: """Return a unique id for the device.""" return f"{self._mac}-panel-light" @property def icon(self) -> str | None: """Return the icon for the device.""" return "mdi:lightbulb" @property def device_info(self): """Return device specific attributes.""" return { "name": self._name, "identifiers": {(DOMAIN, self._mac)}, "manufacturer": "Gree", "connections": {(CONNECTION_NETWORK_MAC, self._mac)}, } @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_SWITCH @property def is_on(self) -> bool: """Return if the light is turned on.""" return self.coordinator.device.light async def async_turn_on(self, **kwargs): """Turn the entity on.""" self.coordinator.device.light = True await self.coordinator.push_state_update() self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the entity off.""" self.coordinator.device.light = False await self.coordinator.push_state_update() self.async_write_ha_state()
"""Test the Plaato config flow.""" from unittest.mock import patch from pyplaato.models.device import PlaatoDeviceType import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.plaato.const import ( CONF_DEVICE_NAME, CONF_DEVICE_TYPE, CONF_USE_WEBHOOK, DOMAIN, ) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM from tests.common import MockConfigEntry BASE_URL = "http://example.com" WEBHOOK_ID = "webhook_id" UNIQUE_ID = "plaato_unique_id" @pytest.fixture(name="webhook_id") def mock_webhook_id(): """Mock webhook_id.""" with patch( "homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID ), patch( "homeassistant.components.webhook.async_generate_url", return_value="hook_id" ): yield async def test_show_config_form(hass): """Test show configuration form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" async def test_show_config_form_device_type_airlock(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool async def test_show_config_form_device_type_keg(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data={CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name"}, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert result["data_schema"].schema.get(CONF_TOKEN) == str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None async def test_show_config_form_validate_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" hass.config.components.add("cloud") with patch( "homeassistant.components.cloud.async_active_subscription", return_value=True ), patch( "homeassistant.components.cloud.async_create_cloudhook", return_value="https://hooks.nabu.casa/ABCD", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "", CONF_USE_WEBHOOK: True, }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" async def test_show_config_form_validate_token(hass): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) 
assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" with patch("homeassistant.components.plaato.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "valid_token"} ) assert result["type"] == RESULT_TYPE_CREATE_ENTRY assert result["title"] == PlaatoDeviceType.Keg.name assert result["data"] == { CONF_USE_WEBHOOK: False, CONF_TOKEN: "valid_token", CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", } async def test_show_config_form_no_cloud_webhook(hass, webhook_id): """Test show configuration form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USE_WEBHOOK: True, CONF_TOKEN: "", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["errors"] is None async def test_show_config_form_api_method_no_auth_token(hass, webhook_id): """Test show configuration form.""" # Using Keg result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Keg, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_auth_token" # Using Airlock result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_DEVICE_TYPE: PlaatoDeviceType.Airlock, CONF_DEVICE_NAME: "device_name", }, ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: ""} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "api_method" assert len(result["errors"]) == 1 assert result["errors"]["base"] == "no_api_method" async def test_options(hass): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SCAN_INTERVAL: 10}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert 
result["data"][CONF_SCAN_INTERVAL] == 10 assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 async def test_options_webhook(hass, webhook_id): """Test updating options.""" config_entry = MockConfigEntry( domain=DOMAIN, title="NAME", data={CONF_USE_WEBHOOK: True, CONF_WEBHOOK_ID: None}, options={CONF_SCAN_INTERVAL: 5}, ) config_entry.add_to_hass(hass) with patch( "homeassistant.components.plaato.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.plaato.async_setup_entry", return_value=True ) as mock_setup_entry: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "webhook" assert result["description_placeholders"] == {"webhook_url": ""} result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_WEBHOOK_ID: WEBHOOK_ID}, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"][CONF_WEBHOOK_ID] == CONF_WEBHOOK_ID assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
w1ll1am23/home-assistant
tests/components/plaato/test_config_flow.py
homeassistant/components/gree/switch.py