Dataset schema
  input       string (lengths 53 to 297k)
  output      string (604 distinct values)
  repo_name   string (376 distinct values)
  test_path   string (583 distinct values)
  code_path   string (lengths 7 to 116)
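Each row pairs a source file (input) with the test file that exercises it (output), plus repository and path metadata. A minimal sketch of iterating such rows with the Hugging Face datasets library; the identifier "org/code-to-test" is hypothetical and stands in for wherever these rows are actually published:

# Minimal sketch, assuming the rows are published on the Hugging Face Hub;
# "org/code-to-test" is a hypothetical identifier, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("org/code-to-test", split="train")

for row in ds.select(range(3)):
    # Column names follow the schema above.
    print(row["repo_name"], row["code_path"], "->", row["test_path"])
    print(row["input"][:120])   # source file under test (can run to ~297k chars)
    print(row["output"][:120])  # corresponding test file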
"""The Compensation integration.""" import logging import warnings import numpy as np import voluptuous as vol from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( CONF_ATTRIBUTE, CONF_SOURCE, CONF_UNIQUE_ID, CONF_UNIT_OF_MEASUREMENT, ) from homeassistant.helpers import config_validation as cv from homeassistant.helpers.discovery import async_load_platform from .const import ( CONF_COMPENSATION, CONF_DATAPOINTS, CONF_DEGREE, CONF_POLYNOMIAL, CONF_PRECISION, DATA_COMPENSATION, DEFAULT_DEGREE, DEFAULT_PRECISION, DOMAIN, ) _LOGGER = logging.getLogger(__name__) def datapoints_greater_than_degree(value: dict) -> dict: """Validate data point list is greater than polynomial degrees.""" if len(value[CONF_DATAPOINTS]) <= value[CONF_DEGREE]: raise vol.Invalid( f"{CONF_DATAPOINTS} must have at least {value[CONF_DEGREE]+1} {CONF_DATAPOINTS}" ) return value COMPENSATION_SCHEMA = vol.Schema( { vol.Required(CONF_SOURCE): cv.entity_id, vol.Required(CONF_DATAPOINTS): [ vol.ExactSequence([vol.Coerce(float), vol.Coerce(float)]) ], vol.Optional(CONF_UNIQUE_ID): cv.string, vol.Optional(CONF_ATTRIBUTE): cv.string, vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION): cv.positive_int, vol.Optional(CONF_DEGREE, default=DEFAULT_DEGREE): vol.All( vol.Coerce(int), vol.Range(min=1, max=7), ), vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {cv.slug: vol.All(COMPENSATION_SCHEMA, datapoints_greater_than_degree)} ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Compensation sensor.""" hass.data[DATA_COMPENSATION] = {} for compensation, conf in config.get(DOMAIN).items(): _LOGGER.debug("Setup %s.%s", DOMAIN, compensation) degree = conf[CONF_DEGREE] # get x values and y values from the x,y point pairs x_values, y_values = zip(*conf[CONF_DATAPOINTS]) # try to get valid coefficients for a polynomial coefficients = None with np.errstate(all="raise"): with warnings.catch_warnings(record=True) as all_warnings: warnings.simplefilter("always") try: coefficients = np.polyfit(x_values, y_values, degree) except FloatingPointError as error: _LOGGER.error( "Setup of %s encountered an error, %s", compensation, error, ) for warning in all_warnings: _LOGGER.warning( "Setup of %s encountered a warning, %s", compensation, str(warning.message).lower(), ) if coefficients is not None: data = { k: v for k, v in conf.items() if k not in [CONF_DEGREE, CONF_DATAPOINTS] } data[CONF_POLYNOMIAL] = np.poly1d(coefficients) hass.data[DATA_COMPENSATION][compensation] = data hass.async_create_task( async_load_platform( hass, SENSOR_DOMAIN, DOMAIN, {CONF_COMPENSATION: compensation}, config, ) ) return True
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
repo_name: home-assistant/home-assistant
test_path: tests/components/blebox/test_config_flow.py
code_path: homeassistant/components/compensation/__init__.py
"""Support for Tado sensors for each zone.""" import logging from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, PERCENTAGE, TEMP_CELSIUS, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import ( CONDITIONS_MAP, DATA, DOMAIN, SIGNAL_TADO_UPDATE_RECEIVED, TYPE_AIR_CONDITIONING, TYPE_HEATING, TYPE_HOT_WATER, ) from .entity import TadoHomeEntity, TadoZoneEntity _LOGGER = logging.getLogger(__name__) HOME_SENSORS = { "outdoor temperature", "solar percentage", "weather condition", } ZONE_SENSORS = { TYPE_HEATING: [ "temperature", "humidity", "heating", "tado mode", ], TYPE_AIR_CONDITIONING: [ "temperature", "humidity", "ac", "tado mode", ], TYPE_HOT_WATER: ["tado mode"], } def format_condition(condition: str) -> str: """Return condition from dict CONDITIONS_MAP.""" for key, value in CONDITIONS_MAP.items(): if condition in value: return key return condition async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities ): """Set up the Tado sensor platform.""" tado = hass.data[DOMAIN][entry.entry_id][DATA] zones = tado.zones entities = [] # Create home sensors entities.extend([TadoHomeSensor(tado, variable) for variable in HOME_SENSORS]) # Create zone sensors for zone in zones: zone_type = zone["type"] if zone_type not in ZONE_SENSORS: _LOGGER.warning("Unknown zone type skipped: %s", zone_type) continue entities.extend( [ TadoZoneSensor(tado, zone["name"], zone["id"], variable) for variable in ZONE_SENSORS[zone_type] ] ) if entities: async_add_entities(entities, True) class TadoHomeSensor(TadoHomeEntity, SensorEntity): """Representation of a Tado Sensor.""" def __init__(self, tado, home_variable): """Initialize of the Tado Sensor.""" super().__init__(tado) self._tado = tado self.home_variable = home_variable self._unique_id = f"{home_variable} {tado.home_id}" self._state = None self._state_attributes = None self._tado_weather_data = self._tado.data["weather"] async def async_added_to_hass(self): """Register for sensor updates.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format( self._tado.home_id, "weather", "data" ), self._async_update_callback, ) ) self._async_update_home_data() @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return f"{self._tado.home_name} {self.home_variable}" @property def state(self): """Return the state of the sensor.""" return self._state @property def extra_state_attributes(self): """Return the state attributes.""" return self._state_attributes @property def unit_of_measurement(self): """Return the unit of measurement.""" if self.home_variable == "temperature": return TEMP_CELSIUS if self.home_variable == "solar percentage": return PERCENTAGE if self.home_variable == "weather condition": return None @property def device_class(self): """Return the device class.""" if self.home_variable == "outdoor temperature": return DEVICE_CLASS_TEMPERATURE return None @callback def _async_update_callback(self): """Update and write state.""" self._async_update_home_data() self.async_write_ha_state() @callback def _async_update_home_data(self): """Handle update callbacks.""" try: self._tado_weather_data = self._tado.data["weather"] except KeyError: return if self.home_variable == "outdoor 
temperature": self._state = self.hass.config.units.temperature( self._tado_weather_data["outsideTemperature"]["celsius"], TEMP_CELSIUS, ) self._state_attributes = { "time": self._tado_weather_data["outsideTemperature"]["timestamp"], } elif self.home_variable == "solar percentage": self._state = self._tado_weather_data["solarIntensity"]["percentage"] self._state_attributes = { "time": self._tado_weather_data["solarIntensity"]["timestamp"], } elif self.home_variable == "weather condition": self._state = format_condition( self._tado_weather_data["weatherState"]["value"] ) self._state_attributes = { "time": self._tado_weather_data["weatherState"]["timestamp"] } class TadoZoneSensor(TadoZoneEntity, SensorEntity): """Representation of a tado Sensor.""" def __init__(self, tado, zone_name, zone_id, zone_variable): """Initialize of the Tado Sensor.""" self._tado = tado super().__init__(zone_name, tado.home_id, zone_id) self.zone_variable = zone_variable self._unique_id = f"{zone_variable} {zone_id} {tado.home_id}" self._state = None self._state_attributes = None self._tado_zone_data = None async def async_added_to_hass(self): """Register for sensor updates.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format( self._tado.home_id, "zone", self.zone_id ), self._async_update_callback, ) ) self._async_update_zone_data() @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return f"{self.zone_name} {self.zone_variable}" @property def state(self): """Return the state of the sensor.""" return self._state @property def extra_state_attributes(self): """Return the state attributes.""" return self._state_attributes @property def unit_of_measurement(self): """Return the unit of measurement.""" if self.zone_variable == "temperature": return self.hass.config.units.temperature_unit if self.zone_variable == "humidity": return PERCENTAGE if self.zone_variable == "heating": return PERCENTAGE if self.zone_variable == "ac": return None @property def device_class(self): """Return the device class.""" if self.zone_variable == "humidity": return DEVICE_CLASS_HUMIDITY if self.zone_variable == "temperature": return DEVICE_CLASS_TEMPERATURE return None @callback def _async_update_callback(self): """Update and write state.""" self._async_update_zone_data() self.async_write_ha_state() @callback def _async_update_zone_data(self): """Handle update callbacks.""" try: self._tado_zone_data = self._tado.data["zone"][self.zone_id] except KeyError: return if self.zone_variable == "temperature": self._state = self.hass.config.units.temperature( self._tado_zone_data.current_temp, TEMP_CELSIUS ) self._state_attributes = { "time": self._tado_zone_data.current_temp_timestamp, "setting": 0, # setting is used in climate device } elif self.zone_variable == "humidity": self._state = self._tado_zone_data.current_humidity self._state_attributes = { "time": self._tado_zone_data.current_humidity_timestamp } elif self.zone_variable == "heating": self._state = self._tado_zone_data.heating_power_percentage self._state_attributes = { "time": self._tado_zone_data.heating_power_timestamp } elif self.zone_variable == "ac": self._state = self._tado_zone_data.ac_power self._state_attributes = {"time": self._tado_zone_data.ac_power_timestamp} elif self.zone_variable == "tado mode": self._state = self._tado_zone_data.tado_mode
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
repo_name: home-assistant/home-assistant
test_path: tests/components/blebox/test_config_flow.py
code_path: homeassistant/components/tado/sensor.py
"""The JuiceNet integration.""" from datetime import timedelta import logging import aiohttp from pyjuicenet import Api, TokenError import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR from .device import JuiceNetApi _LOGGER = logging.getLogger(__name__) PLATFORMS = ["sensor", "switch"] CONFIG_SCHEMA = vol.Schema( vol.All( cv.deprecated(DOMAIN), {DOMAIN: vol.Schema({vol.Required(CONF_ACCESS_TOKEN): cv.string})}, ), extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, config: dict): """Set up the JuiceNet component.""" conf = config.get(DOMAIN) hass.data.setdefault(DOMAIN, {}) if not conf: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=conf ) ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up JuiceNet from a config entry.""" config = entry.data session = async_get_clientsession(hass) access_token = config[CONF_ACCESS_TOKEN] api = Api(access_token, session) juicenet = JuiceNetApi(api) try: await juicenet.setup() except TokenError as error: _LOGGER.error("JuiceNet Error %s", error) return False except aiohttp.ClientError as error: _LOGGER.error("Could not reach the JuiceNet API %s", error) raise ConfigEntryNotReady from error if not juicenet.devices: _LOGGER.error("No JuiceNet devices found for this account") return False _LOGGER.info("%d JuiceNet device(s) found", len(juicenet.devices)) async def async_update_data(): """Update all device states from the JuiceNet API.""" for device in juicenet.devices: await device.update_state(True) return True coordinator = DataUpdateCoordinator( hass, _LOGGER, name="JuiceNet", update_method=async_update_data, update_interval=timedelta(seconds=30), ) hass.data[DOMAIN][entry.entry_id] = { JUICENET_API: juicenet, JUICENET_COORDINATOR: coordinator, } await coordinator.async_config_entry_first_refresh() hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
"""Test Home Assistant config flow for BleBox devices.""" from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch import blebox_uniapi import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components.blebox import config_flow from homeassistant.setup import async_setup_component from .conftest import mock_config, mock_only_feature, setup_product_mock def create_valid_feature_mock(path="homeassistant.components.blebox.Products"): """Return a valid, complete BleBox feature mock.""" feature = mock_only_feature( blebox_uniapi.cover.Cover, unique_id="BleBox-gateBox-1afe34db9437-0.position", full_name="gateBox-0.position", device_class="gate", state=0, async_update=AsyncMock(), current=None, ) product = setup_product_mock("covers", [feature], path) type(product).name = PropertyMock(return_value="My gate controller") type(product).model = PropertyMock(return_value="gateController") type(product).type = PropertyMock(return_value="gateBox") type(product).brand = PropertyMock(return_value="BleBox") type(product).firmware_version = PropertyMock(return_value="1.23") type(product).unique_id = PropertyMock(return_value="abcd0123ef5678") return feature @pytest.fixture(name="valid_feature_mock") def valid_feature_mock_fixture(): """Return a valid, complete BleBox feature mock.""" return create_valid_feature_mock() @pytest.fixture(name="flow_feature_mock") def flow_feature_mock_fixture(): """Return a mocked user flow feature.""" return create_valid_feature_mock( "homeassistant.components.blebox.config_flow.Products" ) async def test_flow_works(hass, valid_feature_mock, flow_feature_mock): """Test that config flow works.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["step_id"] == "user" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == "create_entry" assert result["title"] == "My gate controller" assert result["data"] == { config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80, } @pytest.fixture(name="product_class_mock") def product_class_mock_fixture(): """Return a mocked feature.""" path = "homeassistant.components.blebox.config_flow.Products" patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True) yield patcher async def test_flow_with_connection_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.ConnectionError ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_api_failure(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.Error ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "cannot_connect"} async def test_flow_with_unknown_failure(hass, product_class_mock): """Test that config flow 
works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock(side_effect=RuntimeError) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unknown"} async def test_flow_with_unsupported_version(hass, product_class_mock): """Test that config flow works.""" with product_class_mock as products_class: products_class.async_from_host = AsyncMock( side_effect=blebox_uniapi.error.UnsupportedBoxVersion ) result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["errors"] == {"base": "unsupported_version"} async def test_async_setup(hass): """Test async_setup (for coverage).""" assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"}) await hass.async_block_till_done() async def test_already_configured(hass, valid_feature_mock): """Test that same device cannot be added twice.""" config = mock_config("172.2.3.4") config.add_to_hass(hass) await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}, data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "address_already_configured" async def test_async_setup_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [config] assert config.state is config_entries.ConfigEntryState.LOADED async def test_async_remove_entry(hass, valid_feature_mock): """Test async_setup_entry (for coverage).""" config = mock_config() config.add_to_hass(hass) assert await hass.config_entries.async_setup(config.entry_id) await hass.async_block_till_done() assert await hass.config_entries.async_remove(config.entry_id) await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config.state is config_entries.ConfigEntryState.NOT_LOADED
repo_name: home-assistant/home-assistant
test_path: tests/components/blebox/test_config_flow.py
code_path: homeassistant/components/juicenet/__init__.py
""" Copyright 2017-2019 Fizyr (https://fizyr.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import print_function import tensorflow as tf import sys MINIMUM_TF_VERSION = 2, 3, 0 BLACKLISTED_TF_VERSIONS = [] def tf_version(): """ Get the Tensorflow version. Returns tuple of (major, minor, patch). """ return tuple(map(int, tf.version.VERSION.split('-')[0].split('.'))) def tf_version_ok(minimum_tf_version=MINIMUM_TF_VERSION, blacklisted=BLACKLISTED_TF_VERSIONS): """ Check if the current Tensorflow version is higher than the minimum version. """ return tf_version() >= minimum_tf_version and tf_version() not in blacklisted def assert_tf_version(minimum_tf_version=MINIMUM_TF_VERSION, blacklisted=BLACKLISTED_TF_VERSIONS): """ Assert that the Tensorflow version is up to date. """ detected = tf.version.VERSION required = '.'.join(map(str, minimum_tf_version)) assert(tf_version_ok(minimum_tf_version, blacklisted)), 'You are using tensorflow version {}. The minimum required version is {} (blacklisted: {}).'.format(detected, required, blacklisted) def check_tf_version(): """ Check that the Tensorflow version is up to date. If it isn't, print an error message and exit the script. """ try: assert_tf_version() except AssertionError as e: print(e, file=sys.stderr) sys.exit(1)
""" Copyright 2018 vidosits (https://github.com/vidosits/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import warnings import pytest import numpy as np from tensorflow import keras from keras_retinanet import losses from keras_retinanet.models.densenet import DenseNetBackbone parameters = ['densenet121'] @pytest.mark.parametrize("backbone", parameters) def test_backbone(backbone): # ignore warnings in this test warnings.simplefilter('ignore') num_classes = 10 inputs = np.zeros((1, 200, 400, 3), dtype=np.float32) targets = [np.zeros((1, 14814, 5), dtype=np.float32), np.zeros((1, 14814, num_classes + 1))] inp = keras.layers.Input(inputs[0].shape) densenet_backbone = DenseNetBackbone(backbone) model = densenet_backbone.retinanet(num_classes=num_classes, inputs=inp) model.summary() # compile model model.compile( loss={ 'regression': losses.smooth_l1(), 'classification': losses.focal() }, optimizer=keras.optimizers.Adam(lr=1e-5, clipnorm=0.001)) model.fit(inputs, targets, batch_size=1)
repo_name: delftrobotics/keras-retinanet
test_path: tests/models/test_densenet.py
code_path: keras_retinanet/utils/tf_version.py
""" Copyright 2017-2018 Fizyr (https://fizyr.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import division import numpy as np import cv2 from PIL import Image from .transform import change_transform_origin def read_image_bgr(path): """ Read an image in BGR format. Args path: Path to the image. """ # We deliberately don't use cv2.imread here, since it gives no feedback on errors while reading the image. image = np.ascontiguousarray(Image.open(path).convert('RGB')) return image[:, :, ::-1] def preprocess_image(x, mode='caffe'): """ Preprocess an image by subtracting the ImageNet mean. Args x: np.array of shape (None, None, 3) or (3, None, None). mode: One of "caffe" or "tf". - caffe: will zero-center each color channel with respect to the ImageNet dataset, without scaling. - tf: will scale pixels between -1 and 1, sample-wise. Returns The input with the ImageNet mean subtracted. """ # mostly identical to "https://github.com/keras-team/keras-applications/blob/master/keras_applications/imagenet_utils.py" # except for converting RGB -> BGR since we assume BGR already # covert always to float32 to keep compatibility with opencv x = x.astype(np.float32) if mode == 'tf': x /= 127.5 x -= 1. elif mode == 'caffe': x -= [103.939, 116.779, 123.68] return x def adjust_transform_for_image(transform, image, relative_translation): """ Adjust a transformation for a specific image. The translation of the matrix will be scaled with the size of the image. The linear part of the transformation will adjusted so that the origin of the transformation will be at the center of the image. """ height, width, channels = image.shape result = transform # Scale the translation with the image size if specified. if relative_translation: result[0:2, 2] *= [width, height] # Move the origin of transformation. result = change_transform_origin(transform, (0.5 * width, 0.5 * height)) return result class TransformParameters: """ Struct holding parameters determining how to apply a transformation to an image. Args fill_mode: One of: 'constant', 'nearest', 'reflect', 'wrap' interpolation: One of: 'nearest', 'linear', 'cubic', 'area', 'lanczos4' cval: Fill value to use with fill_mode='constant' relative_translation: If true (the default), interpret translation as a factor of the image size. If false, interpret it as absolute pixels. 
""" def __init__( self, fill_mode = 'nearest', interpolation = 'linear', cval = 0, relative_translation = True, ): self.fill_mode = fill_mode self.cval = cval self.interpolation = interpolation self.relative_translation = relative_translation def cvBorderMode(self): if self.fill_mode == 'constant': return cv2.BORDER_CONSTANT if self.fill_mode == 'nearest': return cv2.BORDER_REPLICATE if self.fill_mode == 'reflect': return cv2.BORDER_REFLECT_101 if self.fill_mode == 'wrap': return cv2.BORDER_WRAP def cvInterpolation(self): if self.interpolation == 'nearest': return cv2.INTER_NEAREST if self.interpolation == 'linear': return cv2.INTER_LINEAR if self.interpolation == 'cubic': return cv2.INTER_CUBIC if self.interpolation == 'area': return cv2.INTER_AREA if self.interpolation == 'lanczos4': return cv2.INTER_LANCZOS4 def apply_transform(matrix, image, params): """ Apply a transformation to an image. The origin of transformation is at the top left corner of the image. The matrix is interpreted such that a point (x, y) on the original image is moved to transform * (x, y) in the generated image. Mathematically speaking, that means that the matrix is a transformation from the transformed image space to the original image space. Args matrix: A homogeneous 3 by 3 matrix holding representing the transformation to apply. image: The image to transform. params: The transform parameters (see TransformParameters) """ output = cv2.warpAffine( image, matrix[:2, :], dsize = (image.shape[1], image.shape[0]), flags = params.cvInterpolation(), borderMode = params.cvBorderMode(), borderValue = params.cval, ) return output def compute_resize_scale(image_shape, min_side=800, max_side=1333): """ Compute an image scale such that the image size is constrained to min_side and max_side. Args min_side: The image's min side will be equal to min_side after resizing. max_side: If after resizing the image's max side is above max_side, resize until the max side is equal to max_side. Returns A resizing scale. """ (rows, cols, _) = image_shape smallest_side = min(rows, cols) # rescale the image so the smallest side is min_side scale = min_side / smallest_side # check if the largest side is now greater than max_side, which can happen # when images have a large aspect ratio largest_side = max(rows, cols) if largest_side * scale > max_side: scale = max_side / largest_side return scale def resize_image(img, min_side=800, max_side=1333): """ Resize an image such that the size is constrained to min_side and max_side. Args min_side: The image's min side will be equal to min_side after resizing. max_side: If after resizing the image's max side is above max_side, resize until the max side is equal to max_side. Returns A resized image. """ # compute scale to resize the image scale = compute_resize_scale(img.shape, min_side=min_side, max_side=max_side) # resize the image with the computed scale img = cv2.resize(img, None, fx=scale, fy=scale) return img, scale def _uniform(val_range): """ Uniformly sample from the given range. Args val_range: A pair of lower and upper bound. """ return np.random.uniform(val_range[0], val_range[1]) def _check_range(val_range, min_val=None, max_val=None): """ Check whether the range is a valid range. Args val_range: A pair of lower and upper bound. min_val: Minimal value for the lower bound. max_val: Maximal value for the upper bound. 
""" if val_range[0] > val_range[1]: raise ValueError('interval lower bound > upper bound') if min_val is not None and val_range[0] < min_val: raise ValueError('invalid interval lower bound') if max_val is not None and val_range[1] > max_val: raise ValueError('invalid interval upper bound') def _clip(image): """ Clip and convert an image to np.uint8. Args image: Image to clip. """ return np.clip(image, 0, 255).astype(np.uint8) class VisualEffect: """ Struct holding parameters and applying image color transformation. Args contrast_factor: A factor for adjusting contrast. Should be between 0 and 3. brightness_delta: Brightness offset between -1 and 1 added to the pixel values. hue_delta: Hue offset between -1 and 1 added to the hue channel. saturation_factor: A factor multiplying the saturation values of each pixel. """ def __init__( self, contrast_factor, brightness_delta, hue_delta, saturation_factor, ): self.contrast_factor = contrast_factor self.brightness_delta = brightness_delta self.hue_delta = hue_delta self.saturation_factor = saturation_factor def __call__(self, image): """ Apply a visual effect on the image. Args image: Image to adjust """ if self.contrast_factor: image = adjust_contrast(image, self.contrast_factor) if self.brightness_delta: image = adjust_brightness(image, self.brightness_delta) if self.hue_delta or self.saturation_factor: image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) if self.hue_delta: image = adjust_hue(image, self.hue_delta) if self.saturation_factor: image = adjust_saturation(image, self.saturation_factor) image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR) return image def random_visual_effect_generator( contrast_range=(0.9, 1.1), brightness_range=(-.1, .1), hue_range=(-0.05, 0.05), saturation_range=(0.95, 1.05) ): """ Generate visual effect parameters uniformly sampled from the given intervals. Args contrast_factor: A factor interval for adjusting contrast. Should be between 0 and 3. brightness_delta: An interval between -1 and 1 for the amount added to the pixels. hue_delta: An interval between -1 and 1 for the amount added to the hue channel. The values are rotated if they exceed 180. saturation_factor: An interval for the factor multiplying the saturation values of each pixel. """ _check_range(contrast_range, 0) _check_range(brightness_range, -1, 1) _check_range(hue_range, -1, 1) _check_range(saturation_range, 0) def _generate(): while True: yield VisualEffect( contrast_factor=_uniform(contrast_range), brightness_delta=_uniform(brightness_range), hue_delta=_uniform(hue_range), saturation_factor=_uniform(saturation_range), ) return _generate() def adjust_contrast(image, factor): """ Adjust contrast of an image. Args image: Image to adjust. factor: A factor for adjusting contrast. """ mean = image.mean(axis=0).mean(axis=0) return _clip((image - mean) * factor + mean) def adjust_brightness(image, delta): """ Adjust brightness of an image Args image: Image to adjust. delta: Brightness offset between -1 and 1 added to the pixel values. """ return _clip(image + delta * 255) def adjust_hue(image, delta): """ Adjust hue of an image. Args image: Image to adjust. delta: An interval between -1 and 1 for the amount added to the hue channel. The values are rotated if they exceed 180. """ image[..., 0] = np.mod(image[..., 0] + delta * 180, 180) return image def adjust_saturation(image, factor): """ Adjust saturation of an image. Args image: Image to adjust. factor: An interval for the factor multiplying the saturation values of each pixel. 
""" image[..., 1] = np.clip(image[..., 1] * factor, 0 , 255) return image
""" Copyright 2018 vidosits (https://github.com/vidosits/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import warnings import pytest import numpy as np from tensorflow import keras from keras_retinanet import losses from keras_retinanet.models.densenet import DenseNetBackbone parameters = ['densenet121'] @pytest.mark.parametrize("backbone", parameters) def test_backbone(backbone): # ignore warnings in this test warnings.simplefilter('ignore') num_classes = 10 inputs = np.zeros((1, 200, 400, 3), dtype=np.float32) targets = [np.zeros((1, 14814, 5), dtype=np.float32), np.zeros((1, 14814, num_classes + 1))] inp = keras.layers.Input(inputs[0].shape) densenet_backbone = DenseNetBackbone(backbone) model = densenet_backbone.retinanet(num_classes=num_classes, inputs=inp) model.summary() # compile model model.compile( loss={ 'regression': losses.smooth_l1(), 'classification': losses.focal() }, optimizer=keras.optimizers.Adam(lr=1e-5, clipnorm=0.001)) model.fit(inputs, targets, batch_size=1)
repo_name: delftrobotics/keras-retinanet
test_path: tests/models/test_densenet.py
code_path: keras_retinanet/utils/image.py
""" DataFrame --------- An efficient 2D container for potentially mixed-type time series or other labeled data series. Similar to its R counterpart, data.frame, except providing automatic data alignment and a host of useful data manipulation methods having to do with the labeling information """ import collections from collections import OrderedDict, abc import functools from io import StringIO import itertools import sys import warnings from textwrap import dedent from typing import FrozenSet, List, Optional, Set, Type, Union import numpy as np import numpy.ma as ma from pandas._config import get_option from pandas._libs import lib, algos as libalgos from pandas.util._decorators import (Appender, Substitution, rewrite_axis_style_signature, deprecate_kwarg) from pandas.util._validators import (validate_bool_kwarg, validate_axis_style_args) from pandas.compat import PY36, raise_with_traceback from pandas.compat.numpy import function as nv from pandas.core.arrays.sparse import SparseFrameAccessor from pandas.core.dtypes.cast import ( maybe_upcast, cast_scalar_to_array, infer_dtype_from_scalar, maybe_cast_to_datetime, maybe_infer_to_datetimelike, maybe_convert_platform, maybe_downcast_to_dtype, invalidate_string_dtypes, coerce_to_dtypes, maybe_upcast_putmask, find_common_type) from pandas.core.dtypes.common import ( is_dict_like, is_datetime64tz_dtype, is_object_dtype, is_extension_type, is_extension_array_dtype, is_datetime64_any_dtype, is_bool_dtype, is_integer_dtype, is_float_dtype, is_integer, is_scalar, is_dtype_equal, needs_i8_conversion, infer_dtype_from_object, ensure_float64, ensure_int64, ensure_platform_int, is_list_like, is_nested_list_like, is_iterator, is_sequence, is_named_tuple) from pandas.core.dtypes.generic import ( ABCSeries, ABCDataFrame, ABCIndexClass, ABCMultiIndex) from pandas.core.dtypes.missing import isna, notna from pandas.core import algorithms from pandas.core import common as com from pandas.core import nanops from pandas.core import ops from pandas.core.accessor import CachedAccessor from pandas.core.arrays import Categorical, ExtensionArray from pandas.core.arrays.datetimelike import ( DatetimeLikeArrayMixin as DatetimeLikeArray ) from pandas.core.generic import NDFrame, _shared_docs from pandas.core.index import (Index, MultiIndex, ensure_index, ensure_index_from_sequences) from pandas.core.indexes import base as ibase from pandas.core.indexes.datetimes import DatetimeIndex from pandas.core.indexes.period import PeriodIndex from pandas.core.indexing import (maybe_droplevels, convert_to_index_sliceable, check_bool_indexer) from pandas.core.internals import BlockManager from pandas.core.internals.construction import ( masked_rec_array_to_mgr, get_names_from_index, to_arrays, reorder_arrays, init_ndarray, init_dict, arrays_to_mgr, sanitize_index) from pandas.core.series import Series from pandas.io.formats import console from pandas.io.formats import format as fmt from pandas.io.formats.printing import pprint_thing import pandas.plotting # --------------------------------------------------------------------- # Docstring templates _shared_doc_kwargs = dict( axes='index, columns', klass='DataFrame', axes_single_arg="{0 or 'index', 1 or 'columns'}", axis="""axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index': apply function to each column. If 1 or 'columns': apply function to each row.""", optional_by=""" by : str or list of str Name or list of names to sort by. 
- if `axis` is 0 or `'index'` then `by` may contain index levels and/or column labels - if `axis` is 1 or `'columns'` then `by` may contain column levels and/or index labels .. versionchanged:: 0.23.0 Allow specifying index or column level names.""", versionadded_to_excel='', optional_labels="""labels : array-like, optional New labels / index to conform the axis specified by 'axis' to.""", optional_axis="""axis : int or str, optional Axis to target. Can be either the axis name ('index', 'columns') or number (0, 1).""", ) _numeric_only_doc = """numeric_only : boolean, default None Include only float, int, boolean data. If None, will attempt to use everything, then use only numeric data """ _merge_doc = """ Merge DataFrame or named Series objects with a database-style join. The join is done on columns or indexes. If joining columns on columns, the DataFrame indexes *will be ignored*. Otherwise if joining indexes on indexes or indexes on a column or columns, the index will be passed on. Parameters ----------%s right : DataFrame or named Series Object to merge with. how : {'left', 'right', 'outer', 'inner'}, default 'inner' Type of merge to be performed. * left: use only keys from left frame, similar to a SQL left outer join; preserve key order. * right: use only keys from right frame, similar to a SQL right outer join; preserve key order. * outer: use union of keys from both frames, similar to a SQL full outer join; sort keys lexicographically. * inner: use intersection of keys from both frames, similar to a SQL inner join; preserve the order of the left keys. on : label or list Column or index level names to join on. These must be found in both DataFrames. If `on` is None and not merging on indexes then this defaults to the intersection of the columns in both DataFrames. left_on : label or list, or array-like Column or index level names to join on in the left DataFrame. Can also be an array or list of arrays of the length of the left DataFrame. These arrays are treated as if they are columns. right_on : label or list, or array-like Column or index level names to join on in the right DataFrame. Can also be an array or list of arrays of the length of the right DataFrame. These arrays are treated as if they are columns. left_index : bool, default False Use the index from the left DataFrame as the join key(s). If it is a MultiIndex, the number of keys in the other DataFrame (either the index or a number of columns) must match the number of levels. right_index : bool, default False Use the index from the right DataFrame as the join key. Same caveats as left_index. sort : bool, default False Sort the join keys lexicographically in the result DataFrame. If False, the order of the join keys depends on the join type (how keyword). suffixes : tuple of (str, str), default ('_x', '_y') Suffix to apply to overlapping column names in the left and right side, respectively. To raise an exception on overlapping columns use (False, False). copy : bool, default True If False, avoid copy if possible. indicator : bool or str, default False If True, adds a column to output DataFrame called "_merge" with information on the source of each row. If string, column with information on source of each row will be added to output DataFrame, and column will be named value of string. 
Information column is Categorical-type and takes on a value of "left_only" for observations whose merge key only appears in 'left' DataFrame, "right_only" for observations whose merge key only appears in 'right' DataFrame, and "both" if the observation's merge key is found in both. validate : str, optional If specified, checks if merge is of specified type. * "one_to_one" or "1:1": check if merge keys are unique in both left and right datasets. * "one_to_many" or "1:m": check if merge keys are unique in left dataset. * "many_to_one" or "m:1": check if merge keys are unique in right dataset. * "many_to_many" or "m:m": allowed, but does not result in checks. .. versionadded:: 0.21.0 Returns ------- DataFrame A DataFrame of the two merged objects. See Also -------- merge_ordered : Merge with optional filling/interpolation. merge_asof : Merge on nearest keys. DataFrame.join : Similar method using indices. Notes ----- Support for specifying index levels as the `on`, `left_on`, and `right_on` parameters was added in version 0.23.0 Support for merging named Series objects was added in version 0.24.0 Examples -------- >>> df1 = pd.DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [1, 2, 3, 5]}) >>> df2 = pd.DataFrame({'rkey': ['foo', 'bar', 'baz', 'foo'], ... 'value': [5, 6, 7, 8]}) >>> df1 lkey value 0 foo 1 1 bar 2 2 baz 3 3 foo 5 >>> df2 rkey value 0 foo 5 1 bar 6 2 baz 7 3 foo 8 Merge df1 and df2 on the lkey and rkey columns. The value columns have the default suffixes, _x and _y, appended. >>> df1.merge(df2, left_on='lkey', right_on='rkey') lkey value_x rkey value_y 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2 with specified left and right suffixes appended to any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', ... suffixes=('_left', '_right')) lkey value_left rkey value_right 0 foo 1 foo 5 1 foo 1 foo 8 2 foo 5 foo 5 3 foo 5 foo 8 4 bar 2 bar 6 5 baz 3 baz 7 Merge DataFrames df1 and df2, but raise an exception if the DataFrames have any overlapping columns. >>> df1.merge(df2, left_on='lkey', right_on='rkey', suffixes=(False, False)) Traceback (most recent call last): ... ValueError: columns overlap but no suffix specified: Index(['value'], dtype='object') """ # ----------------------------------------------------------------------- # DataFrame class class DataFrame(NDFrame): """ Two-dimensional size-mutable, potentially heterogeneous tabular data structure with labeled axes (rows and columns). Arithmetic operations align on both row and column labels. Can be thought of as a dict-like container for Series objects. The primary pandas data structure. Parameters ---------- data : ndarray (structured or homogeneous), Iterable, dict, or DataFrame Dict can contain Series, arrays, constants, or list-like objects .. versionchanged :: 0.23.0 If data is a dict, argument order is maintained for Python 3.6 and later. index : Index or array-like Index to use for resulting frame. Will default to RangeIndex if no indexing information part of input data and no index provided columns : Index or array-like Column labels to use for resulting frame. Will default to RangeIndex (0, 1, 2, ..., n) if no column labels are provided dtype : dtype, default None Data type to force. Only a single dtype is allowed. If None, infer copy : boolean, default False Copy data from inputs. Only affects DataFrame / 2d ndarray input See Also -------- DataFrame.from_records : Constructor from tuples, also record arrays. 
DataFrame.from_dict : From dicts of Series, arrays, or dicts. DataFrame.from_items : From sequence of (key, value) pairs read_csv, pandas.read_table, pandas.read_clipboard. Examples -------- Constructing DataFrame from a dictionary. >>> d = {'col1': [1, 2], 'col2': [3, 4]} >>> df = pd.DataFrame(data=d) >>> df col1 col2 0 1 3 1 2 4 Notice that the inferred dtype is int64. >>> df.dtypes col1 int64 col2 int64 dtype: object To enforce a single dtype: >>> df = pd.DataFrame(data=d, dtype=np.int8) >>> df.dtypes col1 int8 col2 int8 dtype: object Constructing DataFrame from numpy ndarray: >>> df2 = pd.DataFrame(np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]), ... columns=['a', 'b', 'c']) >>> df2 a b c 0 1 2 3 1 4 5 6 2 7 8 9 """ @property def _constructor(self): return DataFrame _constructor_sliced = Series # type: Type[Series] _deprecations = NDFrame._deprecations | frozenset([ 'get_value', 'set_value', 'from_csv', 'from_items' ]) # type: FrozenSet[str] _accessors = set() # type: Set[str] @property def _constructor_expanddim(self): raise NotImplementedError("Not supported for DataFrames!") # ---------------------------------------------------------------------- # Constructors def __init__(self, data=None, index=None, columns=None, dtype=None, copy=False): if data is None: data = {} if dtype is not None: dtype = self._validate_dtype(dtype) if isinstance(data, DataFrame): data = data._data if isinstance(data, BlockManager): mgr = self._init_mgr(data, axes=dict(index=index, columns=columns), dtype=dtype, copy=copy) elif isinstance(data, dict): mgr = init_dict(data, index, columns, dtype=dtype) elif isinstance(data, ma.MaskedArray): import numpy.ma.mrecords as mrecords # masked recarray if isinstance(data, mrecords.MaskedRecords): mgr = masked_rec_array_to_mgr(data, index, columns, dtype, copy) # a masked array else: mask = ma.getmaskarray(data) if mask.any(): data, fill_value = maybe_upcast(data, copy=True) data.soften_mask() # set hardmask False if it was True data[mask] = fill_value else: data = data.copy() mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) elif isinstance(data, (np.ndarray, Series, Index)): if data.dtype.names: data_columns = list(data.dtype.names) data = {k: data[k] for k in data_columns} if columns is None: columns = data_columns mgr = init_dict(data, index, columns, dtype=dtype) elif getattr(data, 'name', None) is not None: mgr = init_dict({data.name: data}, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) # For data is list-like, or Iterable (will consume into list) elif (isinstance(data, abc.Iterable) and not isinstance(data, (str, bytes))): if not isinstance(data, abc.Sequence): data = list(data) if len(data) > 0: if is_list_like(data[0]) and getattr(data[0], 'ndim', 1) == 1: if is_named_tuple(data[0]) and columns is None: columns = data[0]._fields arrays, columns = to_arrays(data, columns, dtype=dtype) columns = ensure_index(columns) # set the index if index is None: if isinstance(data[0], Series): index = get_names_from_index(data) elif isinstance(data[0], Categorical): index = ibase.default_index(len(data[0])) else: index = ibase.default_index(len(data)) mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) else: mgr = init_ndarray(data, index, columns, dtype=dtype, copy=copy) else: mgr = init_dict({}, index, columns, dtype=dtype) else: try: arr = np.array(data, dtype=dtype, copy=copy) except (ValueError, TypeError) as e: exc = TypeError('DataFrame constructor called with ' 'incompatible data and 
dtype: {e}'.format(e=e))
                raise_with_traceback(exc)

            if arr.ndim == 0 and index is not None and columns is not None:
                values = cast_scalar_to_array((len(index), len(columns)),
                                              data, dtype=dtype)
                mgr = init_ndarray(values, index, columns,
                                   dtype=values.dtype, copy=False)
            else:
                raise ValueError('DataFrame constructor not properly called!')

        NDFrame.__init__(self, mgr, fastpath=True)

    # ----------------------------------------------------------------------

    @property
    def axes(self):
        """
        Return a list representing the axes of the DataFrame.

        It has the row axis labels and column axis labels as the only
        members. They are returned in that order.

        Examples
        --------
        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
        >>> df.axes
        [RangeIndex(start=0, stop=2, step=1), Index(['col1', 'col2'],
        dtype='object')]
        """
        return [self.index, self.columns]

    @property
    def shape(self):
        """
        Return a tuple representing the dimensionality of the DataFrame.

        See Also
        --------
        ndarray.shape

        Examples
        --------
        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
        >>> df.shape
        (2, 2)

        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4],
        ...                    'col3': [5, 6]})
        >>> df.shape
        (2, 3)
        """
        return len(self.index), len(self.columns)

    @property
    def _is_homogeneous_type(self):
        """
        Whether all the columns in a DataFrame have the same type.

        Returns
        -------
        bool

        Examples
        --------
        >>> DataFrame({"A": [1, 2], "B": [3, 4]})._is_homogeneous_type
        True
        >>> DataFrame({"A": [1, 2], "B": [3.0, 4.0]})._is_homogeneous_type
        False

        Items with the same type but different sizes are considered
        different types.

        >>> DataFrame({
        ...     "A": np.array([1, 2], dtype=np.int32),
        ...     "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type
        False
        """
        if self._data.any_extension_types:
            return len({block.dtype for block in self._data.blocks}) == 1
        else:
            return not self._data.is_mixed_type

    # ----------------------------------------------------------------------
    # Rendering Methods

    def _repr_fits_vertical_(self):
        """
        Check length against max_rows.
        """
        max_rows = get_option("display.max_rows")
        return len(self) <= max_rows

    def _repr_fits_horizontal_(self, ignore_width=False):
        """
        Check if full repr fits in horizontal boundaries imposed by the
        display options width and max_columns.

        In case of a non-interactive session, no boundaries apply.

        `ignore_width` is here so ipynb+HTML output can behave the way users
        expect. display.max_columns remains in effect.
GH3541, GH3573 """ width, height = console.get_console_size() max_columns = get_option("display.max_columns") nb_columns = len(self.columns) # exceed max columns if ((max_columns and nb_columns > max_columns) or ((not ignore_width) and width and nb_columns > (width // 2))): return False # used by repr_html under IPython notebook or scripts ignore terminal # dims if ignore_width or not console.in_interactive_session(): return True if (get_option('display.width') is not None or console.in_ipython_frontend()): # check at least the column row for excessive width max_rows = 1 else: max_rows = get_option("display.max_rows") # when auto-detecting, so width=None and not in ipython front end # check whether repr fits horizontal by actually checking # the width of the rendered repr buf = StringIO() # only care about the stuff we'll actually print out # and to_string on entire frame may be expensive d = self if not (max_rows is None): # unlimited rows # min of two, where one may be None d = d.iloc[:min(max_rows, len(d))] else: return True d.to_string(buf=buf) value = buf.getvalue() repr_width = max(len(l) for l in value.split('\n')) return repr_width < width def _info_repr(self): """ True if the repr should show the info view. """ info_repr_option = (get_option("display.large_repr") == "info") return info_repr_option and not (self._repr_fits_horizontal_() and self._repr_fits_vertical_()) def __repr__(self): """ Return a string representation for a particular DataFrame. """ buf = StringIO("") if self._info_repr(): self.info(buf=buf) return buf.getvalue() max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") if get_option("display.expand_frame_repr"): width, _ = console.get_console_size() else: width = None self.to_string(buf=buf, max_rows=max_rows, max_cols=max_cols, line_width=width, show_dimensions=show_dimensions) return buf.getvalue() def _repr_html_(self): """ Return a html representation for a particular DataFrame. Mainly for IPython notebook. """ if self._info_repr(): buf = StringIO("") self.info(buf=buf) # need to escape the <class>, should be the first line. val = buf.getvalue().replace('<', r'&lt;', 1) val = val.replace('>', r'&gt;', 1) return '<pre>' + val + '</pre>' if get_option("display.notebook_repr_html"): max_rows = get_option("display.max_rows") max_cols = get_option("display.max_columns") show_dimensions = get_option("display.show_dimensions") return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) else: return None @Substitution(header='Write out the column names. If a list of strings ' 'is given, it is assumed to be aliases for the ' 'column names', col_space_type='int', col_space='The minimum width of each column') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_string(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', line_width=None): """ Render a DataFrame to a console-friendly tabular output. %(shared_params)s line_width : int, optional Width to wrap a line in characters. %(returns)s See Also -------- to_html : Convert DataFrame to HTML. 
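        Notes
        -----
        When ``buf`` is None the rendered string is returned; otherwise the
        text is written to ``buf`` and None is returned. A small sketch of
        the buffer form (``io.StringIO`` here is just one convenient choice
        of writable buffer):

        >>> import io
        >>> buf = io.StringIO()
        >>> pd.DataFrame({'col1': [1]}).to_string(buf=buf)
        >>> print(buf.getvalue())
           col1
        0     1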
Examples -------- >>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]} >>> df = pd.DataFrame(d) >>> print(df.to_string()) col1 col2 0 1 4 1 2 5 2 3 6 """ formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, line_width=line_width) formatter.to_string() if buf is None: result = formatter.buf.getvalue() return result # ---------------------------------------------------------------------- @property def style(self): """ Property returning a Styler object containing methods for building a styled HTML representation fo the DataFrame. See Also -------- io.formats.style.Styler """ from pandas.io.formats.style import Styler return Styler(self) def iteritems(self): r""" Iterator over (column name, Series) pairs. Iterates over the DataFrame columns, returning a tuple with the column name and the content as a Series. Yields ------ label : object The column names for the DataFrame being iterated over. content : Series The column entries belonging to each label, as a Series. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.itertuples : Iterate over DataFrame rows as namedtuples of the values. Examples -------- >>> df = pd.DataFrame({'species': ['bear', 'bear', 'marsupial'], ... 'population': [1864, 22000, 80000]}, ... index=['panda', 'polar', 'koala']) >>> df species population panda bear 1864 polar bear 22000 koala marsupial 80000 >>> for label, content in df.iteritems(): ... print('label:', label) ... print('content:', content, sep='\n') ... label: species content: panda bear polar bear koala marsupial Name: species, dtype: object label: population content: panda 1864 polar 22000 koala 80000 Name: population, dtype: int64 """ if self.columns.is_unique and hasattr(self, '_item_cache'): for k in self.columns: yield k, self._get_item_cache(k) else: for i, k in enumerate(self.columns): yield k, self._ixs(i, axis=1) def iterrows(self): """ Iterate over DataFrame rows as (index, Series) pairs. Yields ------ index : label or tuple of label The index of the row. A tuple for a `MultiIndex`. data : Series The data of the row as a Series. it : generator A generator that iterates over the rows of the frame. See Also -------- itertuples : Iterate over DataFrame rows as namedtuples of the values. iteritems : Iterate over (column name, Series) pairs. Notes ----- 1. Because ``iterrows`` returns a Series for each row, it does **not** preserve dtypes across the rows (dtypes are preserved across columns for DataFrames). For example, >>> df = pd.DataFrame([[1, 1.5]], columns=['int', 'float']) >>> row = next(df.iterrows())[1] >>> row int 1.0 float 1.5 Name: 0, dtype: float64 >>> print(row['int'].dtype) float64 >>> print(df['int'].dtype) int64 To preserve dtypes while iterating over the rows, it is better to use :meth:`itertuples` which returns namedtuples of the values and which is generally faster than ``iterrows``. 2. You should **never modify** something you are iterating over. This is not guaranteed to work in all cases. Depending on the data types, the iterator returns a copy and not a view, and writing to it will have no effect. 
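        Examples
        --------
        A deliberately small frame, just to illustrate the iteration order:

        >>> df = pd.DataFrame({'a': [1, 2]}, index=['x', 'y'])
        >>> for idx, row in df.iterrows():
        ...     print(idx, row['a'])
        x 1
        y 2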
""" columns = self.columns klass = self._constructor_sliced for k, v in zip(self.index, self.values): s = klass(v, index=columns, name=k) yield k, s def itertuples(self, index=True, name="Pandas"): """ Iterate over DataFrame rows as namedtuples. Parameters ---------- index : bool, default True If True, return the index as the first element of the tuple. name : str or None, default "Pandas" The name of the returned namedtuples or None to return regular tuples. Returns ------- iterator An object to iterate over namedtuples for each row in the DataFrame with the first field possibly being the index and following fields being the column values. See Also -------- DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs. DataFrame.iteritems : Iterate over (column name, Series) pairs. Notes ----- The column names will be renamed to positional names if they are invalid Python identifiers, repeated, or start with an underscore. With a large number of columns (>255), regular tuples are returned. Examples -------- >>> df = pd.DataFrame({'num_legs': [4, 2], 'num_wings': [0, 2]}, ... index=['dog', 'hawk']) >>> df num_legs num_wings dog 4 0 hawk 2 2 >>> for row in df.itertuples(): ... print(row) ... Pandas(Index='dog', num_legs=4, num_wings=0) Pandas(Index='hawk', num_legs=2, num_wings=2) By setting the `index` parameter to False we can remove the index as the first element of the tuple: >>> for row in df.itertuples(index=False): ... print(row) ... Pandas(num_legs=4, num_wings=0) Pandas(num_legs=2, num_wings=2) With the `name` parameter set we set a custom name for the yielded namedtuples: >>> for row in df.itertuples(name='Animal'): ... print(row) ... Animal(Index='dog', num_legs=4, num_wings=0) Animal(Index='hawk', num_legs=2, num_wings=2) """ arrays = [] fields = list(self.columns) if index: arrays.append(self.index) fields.insert(0, "Index") # use integer indexing because of possible duplicate column names arrays.extend(self.iloc[:, k] for k in range(len(self.columns))) # Python 3 supports at most 255 arguments to constructor if name is not None and len(self.columns) + index < 256: itertuple = collections.namedtuple(name, fields, rename=True) return map(itertuple._make, zip(*arrays)) # fallback to regular tuples return zip(*arrays) items = iteritems def __len__(self): """ Returns length of info axis, but here we use the index. """ return len(self.index) def dot(self, other): """ Compute the matrix multiplication between the DataFrame and other. This method computes the matrix product between the DataFrame and the values of an other Series, DataFrame or a numpy array. It can also be called using ``self @ other`` in Python >= 3.5. Parameters ---------- other : Series, DataFrame or array-like The other object to compute the matrix product with. Returns ------- Series or DataFrame If other is a Series, return the matrix product between self and other as a Serie. If other is a DataFrame or a numpy.array, return the matrix product of self and other in a DataFrame of a np.array. See Also -------- Series.dot: Similar method for Series. Notes ----- The dimensions of DataFrame and other must be compatible in order to compute the matrix multiplication. In addition, the column names of DataFrame and the index of other must contain the same values, as they will be aligned prior to the multiplication. The dot method for Series computes the inner product, instead of the matrix product here. Examples -------- Here we multiply a DataFrame with a Series. 
>>> df = pd.DataFrame([[0, 1, -2, -1], [1, 1, 1, 1]]) >>> s = pd.Series([1, 1, 2, 1]) >>> df.dot(s) 0 -4 1 5 dtype: int64 Here we multiply a DataFrame with another DataFrame. >>> other = pd.DataFrame([[0, 1], [1, 2], [-1, -1], [2, 0]]) >>> df.dot(other) 0 1 0 1 4 1 2 2 Note that the dot method give the same result as @ >>> df @ other 0 1 0 1 4 1 2 2 The dot method works also if other is an np.array. >>> arr = np.array([[0, 1], [1, 2], [-1, -1], [2, 0]]) >>> df.dot(arr) 0 1 0 1 4 1 2 2 Note how shuffling of the objects does not change the result. >>> s2 = s.reindex([1, 0, 2, 3]) >>> df.dot(s2) 0 -4 1 5 dtype: int64 """ if isinstance(other, (Series, DataFrame)): common = self.columns.union(other.index) if (len(common) > len(self.columns) or len(common) > len(other.index)): raise ValueError('matrices are not aligned') left = self.reindex(columns=common, copy=False) right = other.reindex(index=common, copy=False) lvals = left.values rvals = right.values else: left = self lvals = self.values rvals = np.asarray(other) if lvals.shape[1] != rvals.shape[0]: raise ValueError('Dot product shape mismatch, ' '{s} vs {r}'.format(s=lvals.shape, r=rvals.shape)) if isinstance(other, DataFrame): return self._constructor(np.dot(lvals, rvals), index=left.index, columns=other.columns) elif isinstance(other, Series): return Series(np.dot(lvals, rvals), index=left.index) elif isinstance(rvals, (np.ndarray, Index)): result = np.dot(lvals, rvals) if result.ndim == 2: return self._constructor(result, index=left.index) else: return Series(result, index=left.index) else: # pragma: no cover raise TypeError('unsupported type: {oth}'.format(oth=type(other))) def __matmul__(self, other): """ Matrix multiplication using binary `@` operator in Python>=3.5. """ return self.dot(other) def __rmatmul__(self, other): """ Matrix multiplication using binary `@` operator in Python>=3.5. """ return self.T.dot(np.transpose(other)).T # ---------------------------------------------------------------------- # IO methods (to / from other formats) @classmethod def from_dict(cls, data, orient='columns', dtype=None, columns=None): """ Construct DataFrame from dict of array-like or dicts. Creates DataFrame object from dictionary by columns or by index allowing dtype specification. Parameters ---------- data : dict Of the form {field : array-like} or {field : dict}. orient : {'columns', 'index'}, default 'columns' The "orientation" of the data. If the keys of the passed dict should be the columns of the resulting DataFrame, pass 'columns' (default). Otherwise if the keys should be rows, pass 'index'. dtype : dtype, default None Data type to force, otherwise infer. columns : list, default None Column labels to use when ``orient='index'``. Raises a ValueError if used with ``orient='columns'``. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- DataFrame.from_records : DataFrame from ndarray (structured dtype), list of tuples, dict, or DataFrame. DataFrame : DataFrame object creation using constructor. 
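        Notes
        -----
        ``columns`` can only be combined with ``orient='index'``; with the
        default orientation it raises, as a quick illustration:

        >>> pd.DataFrame.from_dict({'col_1': [3, 2]}, columns=['A'])
        Traceback (most recent call last):
            ...
        ValueError: cannot use columns parameter with orient='columns'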
Examples -------- By default the keys of the dict become the DataFrame columns: >>> data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']} >>> pd.DataFrame.from_dict(data) col_1 col_2 0 3 a 1 2 b 2 1 c 3 0 d Specify ``orient='index'`` to create the DataFrame using dictionary keys as rows: >>> data = {'row_1': [3, 2, 1, 0], 'row_2': ['a', 'b', 'c', 'd']} >>> pd.DataFrame.from_dict(data, orient='index') 0 1 2 3 row_1 3 2 1 0 row_2 a b c d When using the 'index' orientation, the column names can be specified manually: >>> pd.DataFrame.from_dict(data, orient='index', ... columns=['A', 'B', 'C', 'D']) A B C D row_1 3 2 1 0 row_2 a b c d """ index = None orient = orient.lower() if orient == 'index': if len(data) > 0: # TODO speed up Series case if isinstance(list(data.values())[0], (Series, dict)): data = _from_nested_dict(data) else: data, index = list(data.values()), list(data.keys()) elif orient == 'columns': if columns is not None: raise ValueError("cannot use columns parameter with " "orient='columns'") else: # pragma: no cover raise ValueError('only recognize index or columns for orient') return cls(data, index=index, columns=columns, dtype=dtype) def to_numpy(self, dtype=None, copy=False): """ Convert the DataFrame to a NumPy array. .. versionadded:: 0.24.0 By default, the dtype of the returned array will be the common NumPy dtype of all types in the DataFrame. For example, if the dtypes are ``float16`` and ``float32``, the results dtype will be ``float32``. This may require copying data and coercing values, which may be expensive. Parameters ---------- dtype : str or numpy.dtype, optional The dtype to pass to :meth:`numpy.asarray` copy : bool, default False Whether to ensure that the returned value is a not a view on another array. Note that ``copy=False`` does not *ensure* that ``to_numpy()`` is no-copy. Rather, ``copy=True`` ensure that a copy is made, even if not strictly necessary. Returns ------- numpy.ndarray See Also -------- Series.to_numpy : Similar method for Series. Examples -------- >>> pd.DataFrame({"A": [1, 2], "B": [3, 4]}).to_numpy() array([[1, 3], [2, 4]]) With heterogenous data, the lowest common type will have to be used. >>> df = pd.DataFrame({"A": [1, 2], "B": [3.0, 4.5]}) >>> df.to_numpy() array([[1. , 3. ], [2. , 4.5]]) For a mix of numeric and non-numeric types, the output array will have object dtype. >>> df['C'] = pd.date_range('2000', periods=2) >>> df.to_numpy() array([[1, 3.0, Timestamp('2000-01-01 00:00:00')], [2, 4.5, Timestamp('2000-01-02 00:00:00')]], dtype=object) """ result = np.array(self.values, dtype=dtype, copy=copy) return result def to_dict(self, orient='dict', into=dict): """ Convert the DataFrame to a dictionary. The type of the key-value pairs can be customized with the parameters (see below). Parameters ---------- orient : str {'dict', 'list', 'series', 'split', 'records', 'index'} Determines the type of the values of the dictionary. - 'dict' (default) : dict like {column -> {index -> value}} - 'list' : dict like {column -> [values]} - 'series' : dict like {column -> Series(values)} - 'split' : dict like {'index' -> [index], 'columns' -> [columns], 'data' -> [values]} - 'records' : list like [{column -> value}, ... , {column -> value}] - 'index' : dict like {index -> {column -> value}} Abbreviations are allowed. `s` indicates `series` and `sp` indicates `split`. into : class, default dict The collections.abc.Mapping subclass used for all Mappings in the return value. 
Can be the actual class or an empty instance of the mapping type you want. If you want a collections.defaultdict, you must pass it initialized. .. versionadded:: 0.21.0 Returns ------- dict, list or collections.abc.Mapping Return a collections.abc.Mapping object representing the DataFrame. The resulting transformation depends on the `orient` parameter. See Also -------- DataFrame.from_dict: Create a DataFrame from a dictionary. DataFrame.to_json: Convert a DataFrame to JSON format. Examples -------- >>> df = pd.DataFrame({'col1': [1, 2], ... 'col2': [0.5, 0.75]}, ... index=['row1', 'row2']) >>> df col1 col2 row1 1 0.50 row2 2 0.75 >>> df.to_dict() {'col1': {'row1': 1, 'row2': 2}, 'col2': {'row1': 0.5, 'row2': 0.75}} You can specify the return orientation. >>> df.to_dict('series') {'col1': row1 1 row2 2 Name: col1, dtype: int64, 'col2': row1 0.50 row2 0.75 Name: col2, dtype: float64} >>> df.to_dict('split') {'index': ['row1', 'row2'], 'columns': ['col1', 'col2'], 'data': [[1, 0.5], [2, 0.75]]} >>> df.to_dict('records') [{'col1': 1, 'col2': 0.5}, {'col1': 2, 'col2': 0.75}] >>> df.to_dict('index') {'row1': {'col1': 1, 'col2': 0.5}, 'row2': {'col1': 2, 'col2': 0.75}} You can also specify the mapping type. >>> from collections import OrderedDict, defaultdict >>> df.to_dict(into=OrderedDict) OrderedDict([('col1', OrderedDict([('row1', 1), ('row2', 2)])), ('col2', OrderedDict([('row1', 0.5), ('row2', 0.75)]))]) If you want a `defaultdict`, you need to initialize it: >>> dd = defaultdict(list) >>> df.to_dict('records', into=dd) [defaultdict(<class 'list'>, {'col1': 1, 'col2': 0.5}), defaultdict(<class 'list'>, {'col1': 2, 'col2': 0.75})] """ if not self.columns.is_unique: warnings.warn("DataFrame columns are not unique, some " "columns will be omitted.", UserWarning, stacklevel=2) # GH16122 into_c = com.standardize_mapping(into) if orient.lower().startswith('d'): return into_c( (k, v.to_dict(into)) for k, v in self.items()) elif orient.lower().startswith('l'): return into_c((k, v.tolist()) for k, v in self.items()) elif orient.lower().startswith('sp'): return into_c((('index', self.index.tolist()), ('columns', self.columns.tolist()), ('data', [ list(map(com.maybe_box_datetimelike, t)) for t in self.itertuples(index=False, name=None) ]))) elif orient.lower().startswith('s'): return into_c((k, com.maybe_box_datetimelike(v)) for k, v in self.items()) elif orient.lower().startswith('r'): columns = self.columns.tolist() rows = (dict(zip(columns, row)) for row in self.itertuples(index=False, name=None)) return [ into_c((k, com.maybe_box_datetimelike(v)) for k, v in row.items()) for row in rows] elif orient.lower().startswith('i'): if not self.index.is_unique: raise ValueError( "DataFrame index must be unique for orient='index'." ) return into_c((t[0], dict(zip(self.columns, t[1:]))) for t in self.itertuples(name=None)) else: raise ValueError("orient '{o}' not understood".format(o=orient)) def to_gbq(self, destination_table, project_id=None, chunksize=None, reauth=False, if_exists='fail', auth_local_webserver=False, table_schema=None, location=None, progress_bar=True, credentials=None, verbose=None, private_key=None): """ Write a DataFrame to a Google BigQuery table. This function requires the `pandas-gbq package <https://pandas-gbq.readthedocs.io>`__. See the `How to authenticate with Google BigQuery <https://pandas-gbq.readthedocs.io/en/latest/howto/authentication.html>`__ guide for authentication instructions. 
Parameters ---------- destination_table : str Name of table to be written, in the form ``dataset.tablename``. project_id : str, optional Google BigQuery Account project ID. Optional when available from the environment. chunksize : int, optional Number of rows to be inserted in each chunk from the dataframe. Set to ``None`` to load the whole dataframe at once. reauth : bool, default False Force Google BigQuery to re-authenticate the user. This is useful if multiple accounts are used. if_exists : str, default 'fail' Behavior when the destination table exists. Value can be one of: ``'fail'`` If table exists, do nothing. ``'replace'`` If table exists, drop it, recreate it, and insert data. ``'append'`` If table exists, insert data. Create if does not exist. auth_local_webserver : bool, default False Use the `local webserver flow`_ instead of the `console flow`_ when getting user credentials. .. _local webserver flow: http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server .. _console flow: http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console *New in version 0.2.0 of pandas-gbq*. table_schema : list of dicts, optional List of BigQuery table fields to which according DataFrame columns conform to, e.g. ``[{'name': 'col1', 'type': 'STRING'},...]``. If schema is not provided, it will be generated according to dtypes of DataFrame columns. See BigQuery API documentation on available names of a field. *New in version 0.3.1 of pandas-gbq*. location : str, optional Location where the load job should run. See the `BigQuery locations documentation <https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a list of available locations. The location must match that of the target dataset. *New in version 0.5.0 of pandas-gbq*. progress_bar : bool, default True Use the library `tqdm` to show the progress bar for the upload, chunk by chunk. *New in version 0.5.0 of pandas-gbq*. credentials : google.auth.credentials.Credentials, optional Credentials for accessing Google APIs. Use this parameter to override default credentials, such as to use Compute Engine :class:`google.auth.compute_engine.Credentials` or Service Account :class:`google.oauth2.service_account.Credentials` directly. *New in version 0.8.0 of pandas-gbq*. .. versionadded:: 0.24.0 verbose : bool, deprecated Deprecated in pandas-gbq version 0.4.0. Use the `logging module to adjust verbosity instead <https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__. private_key : str, deprecated Deprecated in pandas-gbq version 0.8.0. Use the ``credentials`` parameter and :func:`google.oauth2.service_account.Credentials.from_service_account_info` or :func:`google.oauth2.service_account.Credentials.from_service_account_file` instead. Service account private key in JSON format. Can be file path or string contents. This is useful for remote server authentication (eg. Jupyter/IPython notebook on remote host). See Also -------- pandas_gbq.to_gbq : This function in the pandas-gbq library. read_gbq : Read a DataFrame from Google BigQuery. 
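        Examples
        --------
        A minimal sketch; the project and table names below are only
        placeholders, and the call is skipped because it requires Google
        Cloud credentials:

        >>> df = pd.DataFrame({'my_string': list('abc'),
        ...                    'my_int64': [1, 2, 3]})
        >>> df.to_gbq('my_dataset.my_table',
        ...           project_id='my-project')  # doctest: +SKIP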
""" from pandas.io import gbq gbq.to_gbq(self, destination_table, project_id=project_id, chunksize=chunksize, reauth=reauth, if_exists=if_exists, auth_local_webserver=auth_local_webserver, table_schema=table_schema, location=location, progress_bar=progress_bar, credentials=credentials, verbose=verbose, private_key=private_key) @classmethod def from_records(cls, data, index=None, exclude=None, columns=None, coerce_float=False, nrows=None): """ Convert structured or record ndarray to DataFrame. Parameters ---------- data : ndarray (structured dtype), list of tuples, dict, or DataFrame index : string, list of fields, array-like Field of array to use as the index, alternately a specific set of input labels to use exclude : sequence, default None Columns or fields to exclude columns : sequence, default None Column names to use. If the passed data do not have names associated with them, this argument provides names for the columns. Otherwise this argument indicates the order of the columns in the result (any names not found in the data will become all-NA columns) coerce_float : boolean, default False Attempt to convert values of non-string, non-numeric objects (like decimal.Decimal) to floating point, useful for SQL result sets nrows : int, default None Number of rows to read if data is an iterator Returns ------- DataFrame """ # Make a copy of the input columns so we can modify it if columns is not None: columns = ensure_index(columns) if is_iterator(data): if nrows == 0: return cls() try: first_row = next(data) except StopIteration: return cls(index=index, columns=columns) dtype = None if hasattr(first_row, 'dtype') and first_row.dtype.names: dtype = first_row.dtype values = [first_row] if nrows is None: values += data else: values.extend(itertools.islice(data, nrows - 1)) if dtype is not None: data = np.array(values, dtype=dtype) else: data = values if isinstance(data, dict): if columns is None: columns = arr_columns = ensure_index(sorted(data)) arrays = [data[k] for k in columns] else: arrays = [] arr_columns = [] for k, v in data.items(): if k in columns: arr_columns.append(k) arrays.append(v) arrays, arr_columns = reorder_arrays(arrays, arr_columns, columns) elif isinstance(data, (np.ndarray, DataFrame)): arrays, columns = to_arrays(data, columns) if columns is not None: columns = ensure_index(columns) arr_columns = columns else: arrays, arr_columns = to_arrays(data, columns, coerce_float=coerce_float) arr_columns = ensure_index(arr_columns) if columns is not None: columns = ensure_index(columns) else: columns = arr_columns if exclude is None: exclude = set() else: exclude = set(exclude) result_index = None if index is not None: if (isinstance(index, str) or not hasattr(index, "__iter__")): i = columns.get_loc(index) exclude.add(index) if len(arrays) > 0: result_index = Index(arrays[i], name=index) else: result_index = Index([], name=index) else: try: index_data = [arrays[arr_columns.get_loc(field)] for field in index] result_index = ensure_index_from_sequences(index_data, names=index) exclude.update(index) except Exception: result_index = index if any(exclude): arr_exclude = [x for x in exclude if x in arr_columns] to_remove = [arr_columns.get_loc(col) for col in arr_exclude] arrays = [v for i, v in enumerate(arrays) if i not in to_remove] arr_columns = arr_columns.drop(arr_exclude) columns = columns.drop(exclude) mgr = arrays_to_mgr(arrays, arr_columns, result_index, columns) return cls(mgr) def to_records(self, index=True, convert_datetime64=None, column_dtypes=None, index_dtypes=None): 
""" Convert DataFrame to a NumPy record array. Index will be included as the first field of the record array if requested. Parameters ---------- index : bool, default True Include index in resulting record array, stored in 'index' field or using the index label, if set. convert_datetime64 : bool, default None .. deprecated:: 0.23.0 Whether to convert the index to datetime.datetime if it is a DatetimeIndex. column_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all columns. If a dictionary, a mapping of column names and indices (zero-indexed) to specific data types. index_dtypes : str, type, dict, default None .. versionadded:: 0.24.0 If a string or type, the data type to store all index levels. If a dictionary, a mapping of index level names and indices (zero-indexed) to specific data types. This mapping is applied only if `index=True`. Returns ------- numpy.recarray NumPy ndarray with the DataFrame labels as fields and each row of the DataFrame as entries. See Also -------- DataFrame.from_records: Convert structured or record ndarray to DataFrame. numpy.recarray: An ndarray that allows field access using attributes, analogous to typed columns in a spreadsheet. Examples -------- >>> df = pd.DataFrame({'A': [1, 2], 'B': [0.5, 0.75]}, ... index=['a', 'b']) >>> df A B a 1 0.50 b 2 0.75 >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('index', 'O'), ('A', '<i8'), ('B', '<f8')]) If the DataFrame index has no label then the recarray field name is set to 'index'. If the index has a label then this is used as the field name: >>> df.index = df.index.rename("I") >>> df.to_records() rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i8'), ('B', '<f8')]) The index can be excluded from the record array: >>> df.to_records(index=False) rec.array([(1, 0.5 ), (2, 0.75)], dtype=[('A', '<i8'), ('B', '<f8')]) Data types can be specified for the columns: >>> df.to_records(column_dtypes={"A": "int32"}) rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)], dtype=[('I', 'O'), ('A', '<i4'), ('B', '<f8')]) As well as for the index: >>> df.to_records(index_dtypes="<S2") rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S2'), ('A', '<i8'), ('B', '<f8')]) >>> index_dtypes = "<S{}".format(df.index.str.len().max()) >>> df.to_records(index_dtypes=index_dtypes) rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)], dtype=[('I', 'S1'), ('A', '<i8'), ('B', '<f8')]) """ if convert_datetime64 is not None: warnings.warn("The 'convert_datetime64' parameter is " "deprecated and will be removed in a future " "version", FutureWarning, stacklevel=2) if index: if is_datetime64_any_dtype(self.index) and convert_datetime64: ix_vals = [self.index.to_pydatetime()] else: if isinstance(self.index, MultiIndex): # array of tuples to numpy cols. 
                    # (this conversion makes several intermediate copies)
                    ix_vals = list(map(np.array, zip(*self.index.values)))
                else:
                    ix_vals = [self.index.values]

            arrays = ix_vals + [self[c].get_values() for c in self.columns]

            count = 0
            index_names = list(self.index.names)

            if isinstance(self.index, MultiIndex):
                for i, n in enumerate(index_names):
                    if n is None:
                        index_names[i] = 'level_%d' % count
                        count += 1
            elif index_names[0] is None:
                index_names = ['index']

            names = [str(name) for name in itertools.chain(index_names,
                                                           self.columns)]
        else:
            arrays = [self[c].get_values() for c in self.columns]
            names = [str(c) for c in self.columns]
            index_names = []

        index_len = len(index_names)
        formats = []

        for i, v in enumerate(arrays):
            index = i

            # When the names and arrays are collected, we
            # first collect those in the DataFrame's index,
            # followed by those in its columns.
            #
            # Thus, the total length of the array is:
            # len(index_names) + len(DataFrame.columns).
            #
            # This check allows us to see whether we are
            # handling a name / array in the index or column.
            if index < index_len:
                dtype_mapping = index_dtypes
                name = index_names[index]
            else:
                index -= index_len
                dtype_mapping = column_dtypes
                name = self.columns[index]

            # We have a dictionary, so we get the data type
            # associated with the index or column (which can
            # be denoted by its name in the DataFrame or its
            # position in DataFrame's array of indices or
            # columns, whichever is applicable).
            if is_dict_like(dtype_mapping):
                if name in dtype_mapping:
                    dtype_mapping = dtype_mapping[name]
                elif index in dtype_mapping:
                    dtype_mapping = dtype_mapping[index]
                else:
                    dtype_mapping = None

            # If no mapping can be found, use the array's
            # dtype attribute for formatting.
            #
            # A valid dtype must either be a type or
            # string naming a type.
            if dtype_mapping is None:
                formats.append(v.dtype)
            elif isinstance(dtype_mapping, (type, np.dtype, str)):
                formats.append(dtype_mapping)
            else:
                element = "row" if i < index_len else "column"
                msg = ("Invalid dtype {dtype} specified for "
                       "{element} {name}").format(dtype=dtype_mapping,
                                                  element=element,
                                                  name=name)
                raise ValueError(msg)

        return np.rec.fromarrays(
            arrays,
            dtype={'names': names, 'formats': formats}
        )

    @classmethod
    def from_items(cls, items, columns=None, orient='columns'):
        """
        Construct a DataFrame from a list of tuples.

        .. deprecated:: 0.23.0
          `from_items` is deprecated and will be removed in a future version.
          Use :meth:`DataFrame.from_dict(dict(items)) <DataFrame.from_dict>`
          instead.
          :meth:`DataFrame.from_dict(OrderedDict(items)) <DataFrame.from_dict>`
          may be used to preserve the key order.

        Convert (key, value) pairs to DataFrame. The keys will be the axis
        index (usually the columns, but depends on the specified
        orientation). The values should be arrays or Series.

        Parameters
        ----------
        items : sequence of (key, value) pairs
            Values should be arrays or Series.
        columns : sequence of column labels, optional
            Must be passed if orient='index'.
        orient : {'columns', 'index'}, default 'columns'
            The "orientation" of the data. If the keys of the input
            correspond to column labels, pass 'columns' (default).
            Otherwise if the keys correspond to the index, pass 'index'.

        Returns
        -------
        DataFrame
        """

        warnings.warn("from_items is deprecated. Please use " "DataFrame.from_dict(dict(items), ...) instead.
" "DataFrame.from_dict(OrderedDict(items)) may be used to " "preserve the key order.", FutureWarning, stacklevel=2) keys, values = zip(*items) if orient == 'columns': if columns is not None: columns = ensure_index(columns) idict = dict(items) if len(idict) < len(items): if not columns.equals(ensure_index(keys)): raise ValueError('With non-unique item names, passed ' 'columns must be identical') arrays = values else: arrays = [idict[k] for k in columns if k in idict] else: columns = ensure_index(keys) arrays = values # GH 17312 # Provide more informative error msg when scalar values passed try: return cls._from_arrays(arrays, columns, None) except ValueError: if not is_nested_list_like(values): raise ValueError('The value in each (key, value) pair ' 'must be an array, Series, or dict') elif orient == 'index': if columns is None: raise TypeError("Must pass columns with orient='index'") keys = ensure_index(keys) # GH 17312 # Provide more informative error msg when scalar values passed try: arr = np.array(values, dtype=object).T data = [lib.maybe_convert_objects(v) for v in arr] return cls._from_arrays(data, columns, keys) except TypeError: if not is_nested_list_like(values): raise ValueError('The value in each (key, value) pair ' 'must be an array, Series, or dict') else: # pragma: no cover raise ValueError("'orient' must be either 'columns' or 'index'") @classmethod def _from_arrays(cls, arrays, columns, index, dtype=None): mgr = arrays_to_mgr(arrays, columns, index, columns, dtype=dtype) return cls(mgr) @classmethod def from_csv(cls, path, header=0, sep=',', index_col=0, parse_dates=True, encoding=None, tupleize_cols=None, infer_datetime_format=False): """ Read CSV file. .. deprecated:: 0.21.0 Use :func:`read_csv` instead. It is preferable to use the more powerful :func:`read_csv` for most general purposes, but ``from_csv`` makes for an easy roundtrip to and from a file (the exact counterpart of ``to_csv``), especially with a DataFrame of time series data. This method only differs from the preferred :func:`read_csv` in some defaults: - `index_col` is ``0`` instead of ``None`` (take first column as index by default) - `parse_dates` is ``True`` instead of ``False`` (try parsing the index as datetime by default) So a ``pd.DataFrame.from_csv(path)`` can be replaced by ``pd.read_csv(path, index_col=0, parse_dates=True)``. Parameters ---------- path : string file path or file handle / StringIO header : int, default 0 Row to use as header (skip prior rows) sep : string, default ',' Field delimiter index_col : int or sequence, default 0 Column to use for index. If a sequence is given, a MultiIndex is used. Different default from read_table parse_dates : boolean, default True Parse dates. Different default from read_table tupleize_cols : boolean, default False write multi_index columns as a list of tuples (if True) or new (expanded format) if False) infer_datetime_format : boolean, default False If True and `parse_dates` is True for a column, try to infer the datetime format based on the first datetime string. If the format can be inferred, there often will be a large parsing speed-up. Returns ------- DataFrame See Also -------- read_csv """ warnings.warn("from_csv is deprecated. Please use read_csv(...) " "instead. 
Note that some of the default arguments are "
                      "different, so please refer to the documentation "
                      "for from_csv when changing your function calls",
                      FutureWarning, stacklevel=2)

        from pandas.io.parsers import read_csv
        return read_csv(path, header=header, sep=sep,
                        parse_dates=parse_dates, index_col=index_col,
                        encoding=encoding, tupleize_cols=tupleize_cols,
                        infer_datetime_format=infer_datetime_format)

    def to_sparse(self, fill_value=None, kind='block'):
        """
        Convert to SparseDataFrame.

        .. deprecated:: 0.25.0

        Implement the sparse version of the DataFrame, meaning that any data
        matching a specific value is omitted in the representation.
        The sparse DataFrame allows for a more efficient storage.

        Parameters
        ----------
        fill_value : float, default None
            The specific value that should be omitted in the representation.
        kind : {'block', 'integer'}, default 'block'
            The kind of the SparseIndex tracking where data is not equal to
            the fill value:

            - 'block' tracks only the locations and sizes of blocks of data.
            - 'integer' keeps an array with all the locations of the data.

            In most cases 'block' is recommended, since it's more memory
            efficient.

        Returns
        -------
        SparseDataFrame
            The sparse representation of the DataFrame.

        See Also
        --------
        DataFrame.to_dense :
            Converts the DataFrame back to its dense form.

        Examples
        --------
        >>> df = pd.DataFrame([(np.nan, np.nan),
        ...                    (1., np.nan),
        ...                    (np.nan, 1.)])
        >>> df
             0    1
        0  NaN  NaN
        1  1.0  NaN
        2  NaN  1.0
        >>> type(df)
        <class 'pandas.core.frame.DataFrame'>

        >>> sdf = df.to_sparse()  # doctest: +SKIP
        >>> sdf  # doctest: +SKIP
             0    1
        0  NaN  NaN
        1  1.0  NaN
        2  NaN  1.0
        >>> type(sdf)  # doctest: +SKIP
        <class 'pandas.core.sparse.frame.SparseDataFrame'>
        """
        warnings.warn("DataFrame.to_sparse is deprecated and will be removed "
                      "in a future version", FutureWarning, stacklevel=2)

        from pandas.core.sparse.api import SparseDataFrame
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", message="SparseDataFrame")
            return SparseDataFrame(self._series, index=self.index,
                                   columns=self.columns, default_kind=kind,
                                   default_fill_value=fill_value)

    @deprecate_kwarg(old_arg_name='encoding', new_arg_name=None)
    def to_stata(self, fname, convert_dates=None, write_index=True,
                 encoding="latin-1", byteorder=None, time_stamp=None,
                 data_label=None, variable_labels=None, version=114,
                 convert_strl=None):
        """
        Export DataFrame object to Stata dta format.

        Writes the DataFrame to a Stata dataset file.
        "dta" files contain a Stata dataset.

        Parameters
        ----------
        fname : str, buffer or path object
            String, path object (pathlib.Path or py._path.local.LocalPath) or
            object implementing a binary write() function. If using a buffer
            then the buffer will not be automatically closed after the file
            data has been written.
        convert_dates : dict
            Dictionary mapping columns containing datetime types to stata
            internal format to use when writing the dates. Options are 'tc',
            'td', 'tm', 'tw', 'th', 'tq', 'ty'. Column can be either an
            integer or a name. Datetime columns that do not have a conversion
            type specified will be converted to 'tc'. Raises
            NotImplementedError if a datetime column has timezone
            information.
        write_index : bool
            Write the index to Stata dataset.
        encoding : str
            Default is latin-1. Unicode is not supported.
        byteorder : str
            Can be ">", "<", "little", or "big". Default is `sys.byteorder`.
        time_stamp : datetime
            A datetime to use as file creation date. Default is the current
            time.
        data_label : str, optional
            A label for the data set. Must be 80 characters or smaller.
variable_labels : dict Dictionary containing columns as keys and variable labels as values. Each label must be 80 characters or smaller. .. versionadded:: 0.19.0 version : {114, 117}, default 114 Version to use in the output dta file. Version 114 can be used read by Stata 10 and later. Version 117 can be read by Stata 13 or later. Version 114 limits string variables to 244 characters or fewer while 117 allows strings with lengths up to 2,000,000 characters. .. versionadded:: 0.23.0 convert_strl : list, optional List of column names to convert to string columns to Stata StrL format. Only available if version is 117. Storing strings in the StrL format can produce smaller dta files if strings have more than 8 characters and values are repeated. .. versionadded:: 0.23.0 Raises ------ NotImplementedError * If datetimes contain timezone information * Column dtype is not representable in Stata ValueError * Columns listed in convert_dates are neither datetime64[ns] or datetime.datetime * Column listed in convert_dates is not in DataFrame * Categorical label contains more than 32,000 characters .. versionadded:: 0.19.0 See Also -------- read_stata : Import Stata data files. io.stata.StataWriter : Low-level writer for Stata data files. io.stata.StataWriter117 : Low-level writer for version 117 files. Examples -------- >>> df = pd.DataFrame({'animal': ['falcon', 'parrot', 'falcon', ... 'parrot'], ... 'speed': [350, 18, 361, 15]}) >>> df.to_stata('animals.dta') # doctest: +SKIP """ kwargs = {} if version not in (114, 117): raise ValueError('Only formats 114 and 117 supported.') if version == 114: if convert_strl is not None: raise ValueError('strl support is only available when using ' 'format 117') from pandas.io.stata import StataWriter as statawriter else: from pandas.io.stata import StataWriter117 as statawriter kwargs['convert_strl'] = convert_strl writer = statawriter(fname, self, convert_dates=convert_dates, byteorder=byteorder, time_stamp=time_stamp, data_label=data_label, write_index=write_index, variable_labels=variable_labels, **kwargs) writer.write_file() def to_feather(self, fname): """ Write out the binary feather-format for DataFrames. .. versionadded:: 0.20.0 Parameters ---------- fname : str string file path """ from pandas.io.feather_format import to_feather to_feather(self, fname) def to_parquet(self, fname, engine='auto', compression='snappy', index=None, partition_cols=None, **kwargs): """ Write a DataFrame to the binary parquet format. .. versionadded:: 0.21.0 This function writes the dataframe as a `parquet file <https://parquet.apache.org/>`_. You can choose different parquet backends, and have the option of compression. See :ref:`the user guide <io.parquet>` for more details. Parameters ---------- fname : str File path or Root Directory path. Will be used as Root Directory path while writing a partitioned dataset. .. versionchanged:: 0.24.0 engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto' Parquet library to use. If 'auto', then the option ``io.parquet.engine`` is used. The default ``io.parquet.engine`` behavior is to try 'pyarrow', falling back to 'fastparquet' if 'pyarrow' is unavailable. compression : {'snappy', 'gzip', 'brotli', None}, default 'snappy' Name of the compression to use. Use ``None`` for no compression. index : bool, default None If ``True``, include the dataframe's index(es) in the file output. If ``False``, they will not be written to the file. If ``None``, the behavior depends on the chosen engine. .. 
versionadded:: 0.24.0 partition_cols : list, optional, default None Column names by which to partition the dataset Columns are partitioned in the order they are given .. versionadded:: 0.24.0 **kwargs Additional arguments passed to the parquet library. See :ref:`pandas io <io.parquet>` for more details. See Also -------- read_parquet : Read a parquet file. DataFrame.to_csv : Write a csv file. DataFrame.to_sql : Write to a sql table. DataFrame.to_hdf : Write to hdf. Notes ----- This function requires either the `fastparquet <https://pypi.org/project/fastparquet>`_ or `pyarrow <https://arrow.apache.org/docs/python/>`_ library. Examples -------- >>> df = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]}) >>> df.to_parquet('df.parquet.gzip', ... compression='gzip') # doctest: +SKIP >>> pd.read_parquet('df.parquet.gzip') # doctest: +SKIP col1 col2 0 1 3 1 2 4 """ from pandas.io.parquet import to_parquet to_parquet(self, fname, engine, compression=compression, index=index, partition_cols=partition_cols, **kwargs) @Substitution(header='Whether to print column labels, default True', col_space_type='str or int', col_space='The minimum width of each column in CSS length ' 'units. An int is assumed to be px units.\n\n' ' .. versionadded:: 0.25.0\n' ' Ability to use str') @Substitution(shared_params=fmt.common_docstring, returns=fmt.return_docstring) def to_html(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', bold_rows=True, classes=None, escape=True, notebook=False, border=None, table_id=None, render_links=False): """ Render a DataFrame as an HTML table. %(shared_params)s bold_rows : bool, default True Make the row labels bold in the output. classes : str or list or tuple, default None CSS class(es) to apply to the resulting html table. escape : bool, default True Convert the characters <, >, and & to HTML-safe sequences. notebook : {True, False}, default False Whether the generated HTML is for IPython Notebook. border : int A ``border=border`` attribute is included in the opening `<table>` tag. Default ``pd.options.display.html.border``. .. versionadded:: 0.19.0 table_id : str, optional A css id is included in the opening `<table>` tag if specified. .. versionadded:: 0.23.0 render_links : bool, default False Convert URLs to HTML links. .. versionadded:: 0.24.0 %(returns)s See Also -------- to_string : Convert DataFrame to a string. """ if (justify is not None and justify not in fmt._VALID_JUSTIFY_PARAMETERS): raise ValueError("Invalid value for justify parameter") formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, bold_rows=bold_rows, escape=escape, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, table_id=table_id, render_links=render_links) # TODO: a generic formatter wld b in DataFrameFormatter formatter.to_html(classes=classes, notebook=notebook, border=border) if buf is None: return formatter.buf.getvalue() # ---------------------------------------------------------------------- def info(self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None): """ Print a concise summary of a DataFrame. 
This method prints information about a DataFrame including the index dtype and column dtypes, non-null values and memory usage. Parameters ---------- verbose : bool, optional Whether to print the full summary. By default, the setting in ``pandas.options.display.max_info_columns`` is followed. buf : writable buffer, defaults to sys.stdout Where to send the output. By default, the output is printed to sys.stdout. Pass a writable buffer if you need to further process the output. max_cols : int, optional When to switch from the verbose to the truncated output. If the DataFrame has more than `max_cols` columns, the truncated output is used. By default, the setting in ``pandas.options.display.max_info_columns`` is used. memory_usage : bool, str, optional Specifies whether total memory usage of the DataFrame elements (including the index) should be displayed. By default, this follows the ``pandas.options.display.memory_usage`` setting. True always show memory usage. False never shows memory usage. A value of 'deep' is equivalent to "True with deep introspection". Memory usage is shown in human-readable units (base-2 representation). Without deep introspection a memory estimation is made based in column dtype and number of rows assuming values consume the same memory amount for corresponding dtypes. With deep memory introspection, a real memory usage calculation is performed at the cost of computational resources. null_counts : bool, optional Whether to show the non-null counts. By default, this is shown only if the frame is smaller than ``pandas.options.display.max_info_rows`` and ``pandas.options.display.max_info_columns``. A value of True always shows the counts, and False never shows the counts. Returns ------- None This method prints a summary of a DataFrame and returns None. See Also -------- DataFrame.describe: Generate descriptive statistics of DataFrame columns. DataFrame.memory_usage: Memory usage of DataFrame columns. Examples -------- >>> int_values = [1, 2, 3, 4, 5] >>> text_values = ['alpha', 'beta', 'gamma', 'delta', 'epsilon'] >>> float_values = [0.0, 0.25, 0.5, 0.75, 1.0] >>> df = pd.DataFrame({"int_col": int_values, "text_col": text_values, ... "float_col": float_values}) >>> df int_col text_col float_col 0 1 alpha 0.00 1 2 beta 0.25 2 3 gamma 0.50 3 4 delta 0.75 4 5 epsilon 1.00 Prints information of all columns: >>> df.info(verbose=True) <class 'pandas.core.frame.DataFrame'> RangeIndex: 5 entries, 0 to 4 Data columns (total 3 columns): int_col 5 non-null int64 text_col 5 non-null object float_col 5 non-null float64 dtypes: float64(1), int64(1), object(1) memory usage: 248.0+ bytes Prints a summary of columns count and its dtypes but not per column information: >>> df.info(verbose=False) <class 'pandas.core.frame.DataFrame'> RangeIndex: 5 entries, 0 to 4 Columns: 3 entries, int_col to float_col dtypes: float64(1), int64(1), object(1) memory usage: 248.0+ bytes Pipe output of DataFrame.info to buffer instead of sys.stdout, get buffer content and writes to a text file: >>> import io >>> buffer = io.StringIO() >>> df.info(buf=buffer) >>> s = buffer.getvalue() >>> with open("df_info.txt", "w", ... encoding="utf-8") as f: # doctest: +SKIP ... f.write(s) 260 The `memory_usage` parameter allows deep introspection mode, specially useful for big DataFrames and fine-tune memory optimization: >>> random_strings_array = np.random.choice(['a', 'b', 'c'], 10 ** 6) >>> df = pd.DataFrame({ ... 'column_1': np.random.choice(['a', 'b', 'c'], 10 ** 6), ... 
'column_2': np.random.choice(['a', 'b', 'c'], 10 ** 6), ... 'column_3': np.random.choice(['a', 'b', 'c'], 10 ** 6) ... }) >>> df.info() <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 22.9+ MB >>> df.info(memory_usage='deep') <class 'pandas.core.frame.DataFrame'> RangeIndex: 1000000 entries, 0 to 999999 Data columns (total 3 columns): column_1 1000000 non-null object column_2 1000000 non-null object column_3 1000000 non-null object dtypes: object(3) memory usage: 188.8 MB """ if buf is None: # pragma: no cover buf = sys.stdout lines = [] lines.append(str(type(self))) lines.append(self.index._summary()) if len(self.columns) == 0: lines.append('Empty {name}'.format(name=type(self).__name__)) fmt.buffer_put_lines(buf, lines) return cols = self.columns # hack if max_cols is None: max_cols = get_option('display.max_info_columns', len(self.columns) + 1) max_rows = get_option('display.max_info_rows', len(self) + 1) if null_counts is None: show_counts = ((len(self.columns) <= max_cols) and (len(self) < max_rows)) else: show_counts = null_counts exceeds_info_cols = len(self.columns) > max_cols def _verbose_repr(): lines.append('Data columns (total %d columns):' % len(self.columns)) space = max(len(pprint_thing(k)) for k in self.columns) + 4 counts = None tmpl = "{count}{dtype}" if show_counts: counts = self.count() if len(cols) != len(counts): # pragma: no cover raise AssertionError( 'Columns must equal counts ' '({cols:d} != {counts:d})'.format( cols=len(cols), counts=len(counts))) tmpl = "{count} non-null {dtype}" dtypes = self.dtypes for i, col in enumerate(self.columns): dtype = dtypes.iloc[i] col = pprint_thing(col) count = "" if show_counts: count = counts.iloc[i] lines.append(_put_str(col, space) + tmpl.format(count=count, dtype=dtype)) def _non_verbose_repr(): lines.append(self.columns._summary(name='Columns')) def _sizeof_fmt(num, size_qualifier): # returns size in human readable format for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: if num < 1024.0: return ("{num:3.1f}{size_q} " "{x}".format(num=num, size_q=size_qualifier, x=x)) num /= 1024.0 return "{num:3.1f}{size_q} {pb}".format(num=num, size_q=size_qualifier, pb='PB') if verbose: _verbose_repr() elif verbose is False: # specifically set to False, not nesc None _non_verbose_repr() else: if exceeds_info_cols: _non_verbose_repr() else: _verbose_repr() counts = self.get_dtype_counts() dtypes = ['{k}({kk:d})'.format(k=k[0], kk=k[1]) for k in sorted(counts.items())] lines.append('dtypes: {types}'.format(types=', '.join(dtypes))) if memory_usage is None: memory_usage = get_option('display.memory_usage') if memory_usage: # append memory usage of df to display size_qualifier = '' if memory_usage == 'deep': deep = True else: # size_qualifier is just a best effort; not guaranteed to catch # all cases (e.g., it misses categorical data even with object # categories) deep = False if ('object' in counts or self.index._is_memory_usage_qualified()): size_qualifier = '+' mem_usage = self.memory_usage(index=True, deep=deep).sum() lines.append("memory usage: {mem}\n".format( mem=_sizeof_fmt(mem_usage, size_qualifier))) fmt.buffer_put_lines(buf, lines) def memory_usage(self, index=True, deep=False): """ Return the memory usage of each column in bytes. The memory usage can optionally include the contribution of the index and elements of `object` dtype. 
This value is displayed in `DataFrame.info` by default. This can be suppressed by setting ``pandas.options.display.memory_usage`` to False. Parameters ---------- index : bool, default True Specifies whether to include the memory usage of the DataFrame's index in returned Series. If ``index=True``, the memory usage of the index is the first item in the output. deep : bool, default False If True, introspect the data deeply by interrogating `object` dtypes for system-level memory consumption, and include it in the returned values. Returns ------- Series A Series whose index is the original column names and whose values is the memory usage of each column in bytes. See Also -------- numpy.ndarray.nbytes : Total bytes consumed by the elements of an ndarray. Series.memory_usage : Bytes consumed by a Series. Categorical : Memory-efficient array for string values with many repeated values. DataFrame.info : Concise summary of a DataFrame. Examples -------- >>> dtypes = ['int64', 'float64', 'complex128', 'object', 'bool'] >>> data = dict([(t, np.ones(shape=5000).astype(t)) ... for t in dtypes]) >>> df = pd.DataFrame(data) >>> df.head() int64 float64 complex128 object bool 0 1 1.0 1.0+0.0j 1 True 1 1 1.0 1.0+0.0j 1 True 2 1 1.0 1.0+0.0j 1 True 3 1 1.0 1.0+0.0j 1 True 4 1 1.0 1.0+0.0j 1 True >>> df.memory_usage() Index 128 int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 >>> df.memory_usage(index=False) int64 40000 float64 40000 complex128 80000 object 40000 bool 5000 dtype: int64 The memory footprint of `object` dtype columns is ignored by default: >>> df.memory_usage(deep=True) Index 128 int64 40000 float64 40000 complex128 80000 object 160000 bool 5000 dtype: int64 Use a Categorical for efficient storage of an object-dtype column with many repeated values. >>> df['object'].astype('category').memory_usage(deep=True) 5216 """ result = Series([c.memory_usage(index=False, deep=deep) for col, c in self.iteritems()], index=self.columns) if index: result = Series(self.index.memory_usage(deep=deep), index=['Index']).append(result) return result def transpose(self, *args, **kwargs): """ Transpose index and columns. Reflect the DataFrame over its main diagonal by writing rows as columns and vice-versa. The property :attr:`.T` is an accessor to the method :meth:`transpose`. Parameters ---------- copy : bool, default False If True, the underlying data is copied. Otherwise (default), no copy is made if possible. *args, **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. Returns ------- DataFrame The transposed DataFrame. See Also -------- numpy.transpose : Permute the dimensions of a given array. Notes ----- Transposing a DataFrame with mixed dtypes will result in a homogeneous DataFrame with the `object` dtype. In such a case, a copy of the data is always made. Examples -------- **Square DataFrame with homogeneous dtype** >>> d1 = {'col1': [1, 2], 'col2': [3, 4]} >>> df1 = pd.DataFrame(data=d1) >>> df1 col1 col2 0 1 3 1 2 4 >>> df1_transposed = df1.T # or df1.transpose() >>> df1_transposed 0 1 col1 1 2 col2 3 4 When the dtype is homogeneous in the original DataFrame, we get a transposed DataFrame with the same dtype: >>> df1.dtypes col1 int64 col2 int64 dtype: object >>> df1_transposed.dtypes 0 int64 1 int64 dtype: object **Non-square DataFrame with mixed dtypes** >>> d2 = {'name': ['Alice', 'Bob'], ... 'score': [9.5, 8], ... 'employed': [False, True], ... 
'kids': [0, 0]} >>> df2 = pd.DataFrame(data=d2) >>> df2 name score employed kids 0 Alice 9.5 False 0 1 Bob 8.0 True 0 >>> df2_transposed = df2.T # or df2.transpose() >>> df2_transposed 0 1 name Alice Bob score 9.5 8 employed False True kids 0 0 When the DataFrame has mixed dtypes, we get a transposed DataFrame with the `object` dtype: >>> df2.dtypes name object score float64 employed bool kids int64 dtype: object >>> df2_transposed.dtypes 0 object 1 object dtype: object """ nv.validate_transpose(args, dict()) return super().transpose(1, 0, **kwargs) T = property(transpose) # ---------------------------------------------------------------------- # Picklability # legacy pickle formats def _unpickle_frame_compat(self, state): # pragma: no cover if len(state) == 2: # pragma: no cover series, idx = state columns = sorted(series) else: series, cols, idx = state columns = com._unpickle_array(cols) index = com._unpickle_array(idx) self._data = self._init_dict(series, index, columns, None) def _unpickle_matrix_compat(self, state): # pragma: no cover # old unpickling (vals, idx, cols), object_state = state index = com._unpickle_array(idx) dm = DataFrame(vals, index=index, columns=com._unpickle_array(cols), copy=False) if object_state is not None: ovals, _, ocols = object_state objects = DataFrame(ovals, index=index, columns=com._unpickle_array(ocols), copy=False) dm = dm.join(objects) self._data = dm._data # ---------------------------------------------------------------------- # Getting and setting elements def get_value(self, index, col, takeable=False): """ Quickly retrieve single value at passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label takeable : interpret the index/col as indexers, default False Returns ------- scalar """ warnings.warn("get_value is deprecated and will be removed " "in a future release. Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._get_value(index, col, takeable=takeable) def _get_value(self, index, col, takeable=False): if takeable: series = self._iget_item_cache(col) return com.maybe_box_datetimelike(series._values[index]) series = self._get_item_cache(col) engine = self.index._engine try: return engine.get_value(series._values, index) except KeyError: # GH 20629 if self.index.nlevels > 1: # partial indexing forbidden raise except (TypeError, ValueError): pass # we cannot handle direct indexing # use positional col = self.columns.get_loc(col) index = self.index.get_loc(index) return self._get_value(index, col, takeable=True) _get_value.__doc__ = get_value.__doc__ def set_value(self, index, col, value, takeable=False): """ Put single value at passed column and index. .. deprecated:: 0.21.0 Use .at[] or .iat[] accessors instead. Parameters ---------- index : row label col : column label value : scalar takeable : interpret the index/col as indexers, default False Returns ------- DataFrame If label pair is contained, will be reference to calling DataFrame, otherwise a new object. """ warnings.warn("set_value is deprecated and will be removed " "in a future release. 
Please use " ".at[] or .iat[] accessors instead", FutureWarning, stacklevel=2) return self._set_value(index, col, value, takeable=takeable) def _set_value(self, index, col, value, takeable=False): try: if takeable is True: series = self._iget_item_cache(col) return series._set_value(index, value, takeable=True) series = self._get_item_cache(col) engine = self.index._engine engine.set_value(series._values, index, value) return self except (KeyError, TypeError): # set using a non-recursive method & reset the cache if takeable: self.iloc[index, col] = value else: self.loc[index, col] = value self._item_cache.pop(col, None) return self _set_value.__doc__ = set_value.__doc__ def _ixs(self, i, axis=0): """ Parameters ---------- i : int, slice, or sequence of integers axis : int Notes ----- If slice passed, the resulting data will be a view. """ # irow if axis == 0: if isinstance(i, slice): return self[i] else: label = self.index[i] if isinstance(label, Index): # a location index by definition result = self.take(i, axis=axis) copy = True else: new_values = self._data.fast_xs(i) if is_scalar(new_values): return new_values # if we are a copy, mark as such copy = (isinstance(new_values, np.ndarray) and new_values.base is None) result = self._constructor_sliced(new_values, index=self.columns, name=self.index[i], dtype=new_values.dtype) result._set_is_copy(self, copy=copy) return result # icol else: label = self.columns[i] if isinstance(i, slice): # need to return view lab_slice = slice(label[0], label[-1]) return self.loc[:, lab_slice] else: if isinstance(label, Index): return self._take(i, axis=1) index_len = len(self.index) # if the values returned are not the same length # as the index (iow a not found value), iget returns # a 0-len ndarray. This is effectively catching # a numpy error (as numpy should really raise) values = self._data.iget(i) if index_len and not len(values): values = np.array([np.nan] * index_len, dtype=object) result = self._box_col_values(values, label) # this is a cached value, mark it so result._set_as_cached(label, self) return result def __getitem__(self, key): key = lib.item_from_zerodim(key) key = com.apply_if_callable(key, self) # shortcut if the key is in columns try: if self.columns.is_unique and key in self.columns: if self.columns.nlevels > 1: return self._getitem_multilevel(key) return self._get_item_cache(key) except (TypeError, ValueError): # The TypeError correctly catches non hashable "key" (e.g. list) # The ValueError can be removed once GH #21729 is fixed pass # Do we have a slicer (on rows)? indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._slice(indexer, axis=0) # Do we have a (boolean) DataFrame? if isinstance(key, DataFrame): return self._getitem_frame(key) # Do we have a (boolean) 1d indexer? 
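        # (a boolean Series, list or ndarray with one entry per row; Series
        # keys are aligned to the frame's index in _getitem_bool_array below)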
if com.is_bool_indexer(key): return self._getitem_bool_array(key) # We are left with two options: a single key, and a collection of keys, # We interpret tuples as collections only for non-MultiIndex is_single_key = isinstance(key, tuple) or not is_list_like(key) if is_single_key: if self.columns.nlevels > 1: return self._getitem_multilevel(key) indexer = self.columns.get_loc(key) if is_integer(indexer): indexer = [indexer] else: if is_iterator(key): key = list(key) indexer = self.loc._convert_to_indexer(key, axis=1, raise_missing=True) # take() does not accept boolean indexers if getattr(indexer, "dtype", None) == bool: indexer = np.where(indexer)[0] data = self._take(indexer, axis=1) if is_single_key: # What does looking for a single key in a non-unique index return? # The behavior is inconsistent. It returns a Series, except when # - the key itself is repeated (test on data.shape, #9519), or # - we have a MultiIndex on columns (test on self.columns, #21309) if data.shape[1] == 1 and not isinstance(self.columns, MultiIndex): data = data[key] return data def _getitem_bool_array(self, key): # also raises Exception if object array with NA values # warning here just in case -- previously __setitem__ was # reindexing but __getitem__ was not; it seems more reasonable to # go with the __setitem__ behavior since that is more consistent # with all other indexing behavior if isinstance(key, Series) and not key.index.equals(self.index): warnings.warn("Boolean Series key will be reindexed to match " "DataFrame index.", UserWarning, stacklevel=3) elif len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d.' % (len(key), len(self.index))) # check_bool_indexer will throw exception if Series key cannot # be reindexed to match DataFrame rows key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] return self._take(indexer, axis=0) def _getitem_multilevel(self, key): loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): new_columns = self.columns[loc] result_columns = maybe_droplevels(new_columns, key) if self._is_mixed_type: result = self.reindex(columns=new_columns) result.columns = result_columns else: new_values = self.values[:, loc] result = self._constructor(new_values, index=self.index, columns=result_columns) result = result.__finalize__(self) # If there is only one column being returned, and its name is # either an empty string, or a tuple with an empty string as its # first element, then treat the empty string as a placeholder # and return the column as if the user had provided that empty # string in the key. If the result is a Series, exclude the # implied empty string from its name. if len(result.columns) == 1: top = result.columns[0] if isinstance(top, tuple): top = top[0] if top == '': result = result[''] if isinstance(result, Series): result = self._constructor_sliced(result, index=self.index, name=key) result._set_is_copy(self) return result else: return self._get_item_cache(key) def _getitem_frame(self, key): if key.values.size and not is_bool_dtype(key.values): raise ValueError('Must pass DataFrame with boolean values only') return self.where(key) def query(self, expr, inplace=False, **kwargs): """ Query the columns of a DataFrame with a boolean expression. Parameters ---------- expr : str The query string to evaluate. You can refer to variables in the environment by prefixing them with an '@' character like ``@a + b``. .. 
versionadded:: 0.25.0 You can refer to column names that contain spaces by surrounding them in backticks. For example, if one of your columns is called ``a a`` and you want to sum it with ``b``, your query should be ```a a` + b``. inplace : bool Whether the query should modify the data in place or return a modified copy. **kwargs See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`DataFrame.query`. .. versionadded:: 0.18.0 Returns ------- DataFrame DataFrame resulting from the provided query expression. See Also -------- eval : Evaluate a string describing operations on DataFrame columns. DataFrame.eval : Evaluate a string describing operations on DataFrame columns. Notes ----- The result of the evaluation of this expression is first passed to :attr:`DataFrame.loc` and if that fails because of a multidimensional key (e.g., a DataFrame) then the result will be passed to :meth:`DataFrame.__getitem__`. This method uses the top-level :func:`eval` function to evaluate the passed query. The :meth:`~pandas.DataFrame.query` method uses a slightly modified Python syntax by default. For example, the ``&`` and ``|`` (bitwise) operators have the precedence of their boolean cousins, :keyword:`and` and :keyword:`or`. This *is* syntactically valid Python, however the semantics are different. You can change the semantics of the expression by passing the keyword argument ``parser='python'``. This enforces the same semantics as evaluation in Python space. Likewise, you can pass ``engine='python'`` to evaluate an expression using Python itself as a backend. This is not recommended as it is inefficient compared to using ``numexpr`` as the engine. The :attr:`DataFrame.index` and :attr:`DataFrame.columns` attributes of the :class:`~pandas.DataFrame` instance are placed in the query namespace by default, which allows you to treat both the index and columns of the frame as a column in the frame. The identifier ``index`` is used for the frame index; you can also use the name of the index to identify it in a query. Please note that Python keywords may not be used as identifiers. For further details and examples see the ``query`` documentation in :ref:`indexing <indexing.query>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), ... 'B': range(10, 0, -2), ... 'C C': range(10, 5, -1)}) >>> df A B C C 0 1 10 10 1 2 8 9 2 3 6 8 3 4 4 7 4 5 2 6 >>> df.query('A > B') A B C C 4 5 2 6 The previous expression is equivalent to >>> df[df.A > df.B] A B C C 4 5 2 6 For columns with spaces in their name, you can use backtick quoting. >>> df.query('B == `C C`') A B C C 0 1 10 10 The previous expression is equivalent to >>> df[df.B == df['C C']] A B C C 0 1 10 10 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(expr, str): msg = "expr must be a string to be evaluated, {0} given" raise ValueError(msg.format(type(expr))) kwargs['level'] = kwargs.pop('level', 0) + 1 kwargs['target'] = None res = self.eval(expr, **kwargs) try: new_data = self.loc[res] except ValueError: # when res is multi-dimensional loc raises, but this is sometimes a # valid query new_data = self[res] if inplace: self._update_inplace(new_data) else: return new_data def eval(self, expr, inplace=False, **kwargs): """ Evaluate a string describing operations on DataFrame columns. Operates on columns only, not specific rows or elements. This allows `eval` to run arbitrary code, which can make you vulnerable to code injection if you pass user input to this function. 
Parameters ---------- expr : str The expression string to evaluate. inplace : bool, default False If the expression contains an assignment, whether to perform the operation inplace and mutate the existing DataFrame. Otherwise, a new DataFrame is returned. .. versionadded:: 0.18.0. kwargs : dict See the documentation for :func:`eval` for complete details on the keyword arguments accepted by :meth:`~pandas.DataFrame.query`. Returns ------- ndarray, scalar, or pandas object The result of the evaluation. See Also -------- DataFrame.query : Evaluates a boolean expression to query the columns of a frame. DataFrame.assign : Can evaluate an expression or function to create new values for a column. eval : Evaluate a Python expression as a string using various backends. Notes ----- For more details see the API documentation for :func:`~eval`. For detailed examples see :ref:`enhancing performance with eval <enhancingperf.eval>`. Examples -------- >>> df = pd.DataFrame({'A': range(1, 6), 'B': range(10, 0, -2)}) >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 >>> df.eval('A + B') 0 11 1 10 2 9 3 8 4 7 dtype: int64 Assignment is allowed though by default the original DataFrame is not modified. >>> df.eval('C = A + B') A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 >>> df A B 0 1 10 1 2 8 2 3 6 3 4 4 4 5 2 Use ``inplace=True`` to modify the original DataFrame. >>> df.eval('C = A + B', inplace=True) >>> df A B C 0 1 10 11 1 2 8 10 2 3 6 9 3 4 4 8 4 5 2 7 """ from pandas.core.computation.eval import eval as _eval inplace = validate_bool_kwarg(inplace, 'inplace') resolvers = kwargs.pop('resolvers', None) kwargs['level'] = kwargs.pop('level', 0) + 1 if resolvers is None: index_resolvers = self._get_index_resolvers() column_resolvers = \ self._get_space_character_free_column_resolvers() resolvers = column_resolvers, index_resolvers if 'target' not in kwargs: kwargs['target'] = self kwargs['resolvers'] = kwargs.get('resolvers', ()) + tuple(resolvers) return _eval(expr, inplace=inplace, **kwargs) def select_dtypes(self, include=None, exclude=None): """ Return a subset of the DataFrame's columns based on the column dtypes. Parameters ---------- include, exclude : scalar or list-like A selection of dtypes or strings to be included/excluded. At least one of these parameters must be supplied. Returns ------- DataFrame The subset of the frame including the dtypes in ``include`` and excluding the dtypes in ``exclude``. Raises ------ ValueError * If both of ``include`` and ``exclude`` are empty * If ``include`` and ``exclude`` have overlapping elements * If any kind of string dtype is passed in. Notes ----- * To select all *numeric* types, use ``np.number`` or ``'number'`` * To select strings you must use the ``object`` dtype, but note that this will return *all* object dtype columns * See the `numpy dtype hierarchy <http://docs.scipy.org/doc/numpy/reference/arrays.scalars.html>`__ * To select datetimes, use ``np.datetime64``, ``'datetime'`` or ``'datetime64'`` * To select timedeltas, use ``np.timedelta64``, ``'timedelta'`` or ``'timedelta64'`` * To select Pandas categorical dtypes, use ``'category'`` * To select Pandas datetimetz dtypes, use ``'datetimetz'`` (new in 0.20.0) or ``'datetime64[ns, tz]'`` Examples -------- >>> df = pd.DataFrame({'a': [1, 2] * 3, ... 'b': [True, False] * 3, ... 
'c': [1.0, 2.0] * 3}) >>> df a b c 0 1 True 1.0 1 2 False 2.0 2 1 True 1.0 3 2 False 2.0 4 1 True 1.0 5 2 False 2.0 >>> df.select_dtypes(include='bool') b 0 True 1 False 2 True 3 False 4 True 5 False >>> df.select_dtypes(include=['float64']) c 0 1.0 1 2.0 2 1.0 3 2.0 4 1.0 5 2.0 >>> df.select_dtypes(exclude=['int']) b c 0 True 1.0 1 False 2.0 2 True 1.0 3 False 2.0 4 True 1.0 5 False 2.0 """ def _get_info_slice(obj, indexer): """Slice the info axis of `obj` with `indexer`.""" if not hasattr(obj, '_info_axis_number'): msg = 'object of type {typ!r} has no info axis' raise TypeError(msg.format(typ=type(obj).__name__)) slices = [slice(None)] * obj.ndim slices[obj._info_axis_number] = indexer return tuple(slices) if not is_list_like(include): include = (include,) if include is not None else () if not is_list_like(exclude): exclude = (exclude,) if exclude is not None else () selection = tuple(map(frozenset, (include, exclude))) if not any(selection): raise ValueError('at least one of include or exclude must be ' 'nonempty') # convert the myriad valid dtypes object to a single representation include, exclude = map( lambda x: frozenset(map(infer_dtype_from_object, x)), selection) for dtypes in (include, exclude): invalidate_string_dtypes(dtypes) # can't both include AND exclude! if not include.isdisjoint(exclude): raise ValueError('include and exclude overlap on {inc_ex}'.format( inc_ex=(include & exclude))) # empty include/exclude -> defaults to True # three cases (we've already raised if both are empty) # case 1: empty include, nonempty exclude # we have True, True, ... True for include, same for exclude # in the loop below we get the excluded # and when we call '&' below we get only the excluded # case 2: nonempty include, empty exclude # same as case 1, but with include # case 3: both nonempty # the "union" of the logic of case 1 and case 2: # we get the included and excluded, and return their logical and include_these = Series(not bool(include), index=self.columns) exclude_these = Series(not bool(exclude), index=self.columns) def is_dtype_instance_mapper(idx, dtype): return idx, functools.partial(issubclass, dtype.type) for idx, f in itertools.starmap(is_dtype_instance_mapper, enumerate(self.dtypes)): if include: # checks for the case of empty include or exclude include_these.iloc[idx] = any(map(f, include)) if exclude: exclude_these.iloc[idx] = not any(map(f, exclude)) dtype_indexer = include_these & exclude_these return self.loc[_get_info_slice(self, dtype_indexer)] def _box_item_values(self, key, values): items = self.columns[self.columns.get_loc(key)] if values.ndim == 2: return self._constructor(values.T, columns=items, index=self.index) else: return self._box_col_values(values, items) def _box_col_values(self, values, items): """ Provide boxed values for a column. 
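
        Parameters
        ----------
        values : array-like
            The values of the column to box.
        items : object
            The column label, used as the ``name`` of the resulting Series.

        Returns
        -------
        Series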
""" klass = self._constructor_sliced return klass(values, index=self.index, name=items, fastpath=True) def __setitem__(self, key, value): key = com.apply_if_callable(key, self) # see if we can slice the rows indexer = convert_to_index_sliceable(self, key) if indexer is not None: return self._setitem_slice(indexer, value) if isinstance(key, DataFrame) or getattr(key, 'ndim', None) == 2: self._setitem_frame(key, value) elif isinstance(key, (Series, np.ndarray, list, Index)): self._setitem_array(key, value) else: # set column self._set_item(key, value) def _setitem_slice(self, key, value): self._check_setitem_copy() self.loc._setitem_with_indexer(key, value) def _setitem_array(self, key, value): # also raises Exception if object array with NA values if com.is_bool_indexer(key): if len(key) != len(self.index): raise ValueError('Item wrong length %d instead of %d!' % (len(key), len(self.index))) key = check_bool_indexer(self.index, key) indexer = key.nonzero()[0] self._check_setitem_copy() self.loc._setitem_with_indexer(indexer, value) else: if isinstance(value, DataFrame): if len(value.columns) != len(key): raise ValueError('Columns must be same length as key') for k1, k2 in zip(key, value.columns): self[k1] = value[k2] else: indexer = self.loc._convert_to_indexer(key, axis=1) self._check_setitem_copy() self.loc._setitem_with_indexer((slice(None), indexer), value) def _setitem_frame(self, key, value): # support boolean setting with DataFrame input, e.g. # df[df > df2] = 0 if isinstance(key, np.ndarray): if key.shape != self.shape: raise ValueError( 'Array conditional must be same shape as self' ) key = self._constructor(key, **self._construct_axes_dict()) if key.values.size and not is_bool_dtype(key.values): raise TypeError( 'Must pass DataFrame or 2-d ndarray with boolean values only' ) self._check_inplace_setting(value) self._check_setitem_copy() self._where(-key, value, inplace=True) def _ensure_valid_index(self, value): """ Ensure that if we don't have an index, that we can create one from the passed value. """ # GH5632, make sure that we are a Series convertible if not len(self.index) and is_list_like(value): try: value = Series(value) except (ValueError, NotImplementedError, TypeError): raise ValueError('Cannot set a frame with no defined index ' 'and a value that cannot be converted to a ' 'Series') self._data = self._data.reindex_axis(value.index.copy(), axis=1, fill_value=np.nan) def _set_item(self, key, value): """ Add series to DataFrame in specified column. If series is a numpy-array (not a Series/TimeSeries), it must be the same length as the DataFrames index or an error will be thrown. Series/TimeSeries will be conformed to the DataFrames index to ensure homogeneity. """ self._ensure_valid_index(value) value = self._sanitize_column(key, value) NDFrame._set_item(self, key, value) # check if we are modifying a copy # try to set first as we want an invalid # value exception to occur first if len(self): self._check_setitem_copy() def insert(self, loc, column, value, allow_duplicates=False): """ Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int Insertion index. 
Must verify 0 <= loc <= len(columns) column : string, number, or hashable object label of the inserted column value : int, Series, or array-like allow_duplicates : bool, optional """ self._ensure_valid_index(value) value = self._sanitize_column(column, value, broadcast=False) self._data.insert(loc, column, value, allow_duplicates=allow_duplicates) def assign(self, **kwargs): r""" Assign new columns to a DataFrame. Returns a new object with all original columns in addition to new ones. Existing columns that are re-assigned will be overwritten. Parameters ---------- **kwargs : dict of {str: callable or Series} The column names are keywords. If the values are callable, they are computed on the DataFrame and assigned to the new columns. The callable must not change input DataFrame (though pandas doesn't check it). If the values are not callable, (e.g. a Series, scalar, or array), they are simply assigned. Returns ------- DataFrame A new DataFrame with the new columns in addition to all the existing columns. Notes ----- Assigning multiple columns within the same ``assign`` is possible. For Python 3.6 and above, later items in '\*\*kwargs' may refer to newly created or modified columns in 'df'; items are computed and assigned into 'df' in order. For Python 3.5 and below, the order of keyword arguments is not specified, you cannot refer to newly created or modified columns. All items are computed first, and then assigned in alphabetical order. .. versionchanged :: 0.23.0 Keyword argument order is maintained for Python 3.6 and later. Examples -------- >>> df = pd.DataFrame({'temp_c': [17.0, 25.0]}, ... index=['Portland', 'Berkeley']) >>> df temp_c Portland 17.0 Berkeley 25.0 Where the value is a callable, evaluated on `df`: >>> df.assign(temp_f=lambda x: x.temp_c * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 Alternatively, the same behavior can be achieved by directly referencing an existing Series or sequence: >>> df.assign(temp_f=df['temp_c'] * 9 / 5 + 32) temp_c temp_f Portland 17.0 62.6 Berkeley 25.0 77.0 In Python 3.6+, you can create multiple columns within the same assign where one of the columns depends on another one defined within the same assign: >>> df.assign(temp_f=lambda x: x['temp_c'] * 9 / 5 + 32, ... temp_k=lambda x: (x['temp_f'] + 459.67) * 5 / 9) temp_c temp_f temp_k Portland 17.0 62.6 290.15 Berkeley 25.0 77.0 298.15 """ data = self.copy() # >= 3.6 preserve order of kwargs if PY36: for k, v in kwargs.items(): data[k] = com.apply_if_callable(v, data) else: # <= 3.5: do all calculations first... results = OrderedDict() for k, v in kwargs.items(): results[k] = com.apply_if_callable(v, data) # <= 3.5 and earlier results = sorted(results.items()) # ... and then assign for k, v in results: data[k] = v return data def _sanitize_column(self, key, value, broadcast=True): """ Ensures new columns (which go into the BlockManager as new blocks) are always copied and converted into an array. Parameters ---------- key : object value : scalar, Series, or array-like broadcast : bool, default True If ``key`` matches multiple duplicate column names in the DataFrame, this parameter indicates whether ``value`` should be tiled so that the returned array contains a (duplicated) column for each occurrence of the key. If False, ``value`` will not be tiled. 
Returns ------- numpy.ndarray """ def reindexer(value): # reindex if necessary if value.index.equals(self.index) or not len(self.index): value = value._values.copy() else: # GH 4107 try: value = value.reindex(self.index)._values except Exception as e: # duplicate axis if not value.index.is_unique: raise e # other raise TypeError('incompatible index of inserted column ' 'with frame index') return value if isinstance(value, Series): value = reindexer(value) elif isinstance(value, DataFrame): # align right-hand-side columns if self.columns # is multi-index and self[key] is a sub-frame if isinstance(self.columns, MultiIndex) and key in self.columns: loc = self.columns.get_loc(key) if isinstance(loc, (slice, Series, np.ndarray, Index)): cols = maybe_droplevels(self.columns[loc], key) if len(cols) and not cols.equals(value.columns): value = value.reindex(cols, axis=1) # now align rows value = reindexer(value).T elif isinstance(value, ExtensionArray): # Explicitly copy here, instead of in sanitize_index, # as sanitize_index won't copy an EA, even with copy=True value = value.copy() value = sanitize_index(value, self.index, copy=False) elif isinstance(value, Index) or is_sequence(value): # turn me into an ndarray value = sanitize_index(value, self.index, copy=False) if not isinstance(value, (np.ndarray, Index)): if isinstance(value, list) and len(value) > 0: value = maybe_convert_platform(value) else: value = com.asarray_tuplesafe(value) elif value.ndim == 2: value = value.copy().T elif isinstance(value, Index): value = value.copy(deep=True) else: value = value.copy() # possibly infer to datetimelike if is_object_dtype(value.dtype): value = maybe_infer_to_datetimelike(value) else: # cast ignores pandas dtypes. so save the dtype first infer_dtype, _ = infer_dtype_from_scalar( value, pandas_dtype=True) # upcast value = cast_scalar_to_array(len(self.index), value) value = maybe_cast_to_datetime(value, infer_dtype) # return internal types directly if is_extension_type(value) or is_extension_array_dtype(value): return value # broadcast across multiple columns if necessary if broadcast and key in self.columns and value.ndim == 1: if (not self.columns.is_unique or isinstance(self.columns, MultiIndex)): existing_piece = self[key] if isinstance(existing_piece, DataFrame): value = np.tile(value, (len(existing_piece.columns), 1)) return np.atleast_2d(np.asarray(value)) @property def _series(self): return {item: Series(self._data.iget(idx), index=self.index, name=item) for idx, item in enumerate(self.columns)} def lookup(self, row_labels, col_labels): """ Label-based "fancy indexing" function for DataFrame. Given equal-length arrays of row and column labels, return an array of the values corresponding to each (row, col) pair. 
        Parameters
        ----------
        row_labels : sequence
            The row labels to use for lookup.
        col_labels : sequence
            The column labels to use for lookup.

        Returns
        -------
        numpy.ndarray
            The found values.

        Notes
        -----
        Akin to::

            result = [df.get_value(row, col)
                      for row, col in zip(row_labels, col_labels)]

        Examples
        --------
        >>> df = pd.DataFrame({'A': [1, 2], 'B': [3, 4]}, index=['x', 'y'])
        >>> df.lookup(['x', 'y'], ['A', 'B'])
        array([1, 4])
        """
        n = len(row_labels)
        if n != len(col_labels):
            raise ValueError('Row labels must have same size as column labels')

        thresh = 1000
        if not self._is_mixed_type or n > thresh:
            values = self.values
            ridx = self.index.get_indexer(row_labels)
            cidx = self.columns.get_indexer(col_labels)
            if (ridx == -1).any():
                raise KeyError('One or more row labels was not found')
            if (cidx == -1).any():
                raise KeyError('One or more column labels was not found')
            flat_index = ridx * len(self.columns) + cidx
            result = values.flat[flat_index]
        else:
            result = np.empty(n, dtype='O')
            for i, (r, c) in enumerate(zip(row_labels, col_labels)):
                result[i] = self._get_value(r, c)

        if is_object_dtype(result):
            result = lib.maybe_convert_objects(result)

        return result

    # ----------------------------------------------------------------------
    # Reindexing and alignment

    def _reindex_axes(self, axes, level, limit, tolerance, method, fill_value,
                      copy):
        frame = self

        columns = axes['columns']
        if columns is not None:
            frame = frame._reindex_columns(columns, method, copy, level,
                                           fill_value, limit, tolerance)

        index = axes['index']
        if index is not None:
            frame = frame._reindex_index(index, method, copy, level,
                                         fill_value, limit, tolerance)

        return frame

    def _reindex_index(self, new_index, method, copy, level,
                       fill_value=np.nan, limit=None, tolerance=None):
        new_index, indexer = self.index.reindex(new_index, method=method,
                                                level=level, limit=limit,
                                                tolerance=tolerance)
        return self._reindex_with_indexers({0: [new_index, indexer]},
                                           copy=copy, fill_value=fill_value,
                                           allow_dups=False)

    def _reindex_columns(self, new_columns, method, copy, level,
                         fill_value=None, limit=None, tolerance=None):
        new_columns, indexer = self.columns.reindex(new_columns,
                                                    method=method,
                                                    level=level, limit=limit,
                                                    tolerance=tolerance)
        return self._reindex_with_indexers({1: [new_columns, indexer]},
                                           copy=copy, fill_value=fill_value,
                                           allow_dups=False)

    def _reindex_multi(self, axes, copy, fill_value):
        """
        We are guaranteed non-Nones in the axes.
""" new_index, row_indexer = self.index.reindex(axes['index']) new_columns, col_indexer = self.columns.reindex(axes['columns']) if row_indexer is not None and col_indexer is not None: indexer = row_indexer, col_indexer new_values = algorithms.take_2d_multi(self.values, indexer, fill_value=fill_value) return self._constructor(new_values, index=new_index, columns=new_columns) else: return self._reindex_with_indexers({0: [new_index, row_indexer], 1: [new_columns, col_indexer]}, copy=copy, fill_value=fill_value) @Appender(_shared_docs['align'] % _shared_doc_kwargs) def align(self, other, join='outer', axis=None, level=None, copy=True, fill_value=None, method=None, limit=None, fill_axis=0, broadcast_axis=None): return super().align(other, join=join, axis=axis, level=level, copy=copy, fill_value=fill_value, method=method, limit=limit, fill_axis=fill_axis, broadcast_axis=broadcast_axis) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.reindex.__doc__) @rewrite_axis_style_signature('labels', [('method', None), ('copy', True), ('level', None), ('fill_value', np.nan), ('limit', None), ('tolerance', None)]) def reindex(self, *args, **kwargs): axes = validate_axis_style_args(self, args, kwargs, 'labels', 'reindex') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('labels', None) return super().reindex(**kwargs) @Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs) def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True, limit=None, fill_value=np.nan): return super().reindex_axis(labels=labels, axis=axis, method=method, level=level, copy=copy, limit=limit, fill_value=fill_value) def drop(self, labels=None, axis=0, index=None, columns=None, level=None, inplace=False, errors='raise'): """ Drop specified labels from rows or columns. Remove rows or columns by specifying label names and corresponding axis, or by specifying directly index or column names. When using a multi-index, labels on different levels can be removed by specifying the level. Parameters ---------- labels : single label or list-like Index or column labels to drop. axis : {0 or 'index', 1 or 'columns'}, default 0 Whether to drop labels from the index (0 or 'index') or columns (1 or 'columns'). index : single label or list-like Alternative to specifying axis (``labels, axis=0`` is equivalent to ``index=labels``). .. versionadded:: 0.21.0 columns : single label or list-like Alternative to specifying axis (``labels, axis=1`` is equivalent to ``columns=labels``). .. versionadded:: 0.21.0 level : int or level name, optional For MultiIndex, level from which the labels will be removed. inplace : bool, default False If True, do operation inplace and return None. errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and only existing labels are dropped. Returns ------- DataFrame DataFrame without the removed index or column labels. Raises ------ KeyError If any of the labels is not found in the selected axis. See Also -------- DataFrame.loc : Label-location based indexer for selection by label. DataFrame.dropna : Return DataFrame with labels on given axis omitted where (all or any) data are missing. DataFrame.drop_duplicates : Return DataFrame with duplicate rows removed, optionally only considering certain columns. Series.drop : Return Series with specified index labels removed. Examples -------- >>> df = pd.DataFrame(np.arange(12).reshape(3, 4), ... 
columns=['A', 'B', 'C', 'D']) >>> df A B C D 0 0 1 2 3 1 4 5 6 7 2 8 9 10 11 Drop columns >>> df.drop(['B', 'C'], axis=1) A D 0 0 3 1 4 7 2 8 11 >>> df.drop(columns=['B', 'C']) A D 0 0 3 1 4 7 2 8 11 Drop a row by index >>> df.drop([0, 1]) A B C D 2 8 9 10 11 Drop columns and/or rows of MultiIndex DataFrame >>> midx = pd.MultiIndex(levels=[['lama', 'cow', 'falcon'], ... ['speed', 'weight', 'length']], ... codes=[[0, 0, 0, 1, 1, 1, 2, 2, 2], ... [0, 1, 2, 0, 1, 2, 0, 1, 2]]) >>> df = pd.DataFrame(index=midx, columns=['big', 'small'], ... data=[[45, 30], [200, 100], [1.5, 1], [30, 20], ... [250, 150], [1.5, 0.8], [320, 250], ... [1, 0.8], [0.3, 0.2]]) >>> df big small lama speed 45.0 30.0 weight 200.0 100.0 length 1.5 1.0 cow speed 30.0 20.0 weight 250.0 150.0 length 1.5 0.8 falcon speed 320.0 250.0 weight 1.0 0.8 length 0.3 0.2 >>> df.drop(index='cow', columns='small') big lama speed 45.0 weight 200.0 length 1.5 falcon speed 320.0 weight 1.0 length 0.3 >>> df.drop(index='length', level=1) big small lama speed 45.0 30.0 weight 200.0 100.0 cow speed 30.0 20.0 weight 250.0 150.0 falcon speed 320.0 250.0 weight 1.0 0.8 """ return super().drop(labels=labels, axis=axis, index=index, columns=columns, level=level, inplace=inplace, errors=errors) @rewrite_axis_style_signature('mapper', [('copy', True), ('inplace', False), ('level', None), ('errors', 'ignore')]) def rename(self, *args, **kwargs): """ Alter axes labels. Function / dict values must be unique (1-to-1). Labels not contained in a dict / Series will be left as-is. Extra labels listed don't throw an error. See the :ref:`user guide <basics.rename>` for more. Parameters ---------- mapper : dict-like or function Dict-like or functions transformations to apply to that axis' values. Use either ``mapper`` and ``axis`` to specify the axis to target with ``mapper``, or ``index`` and ``columns``. index : dict-like or function Alternative to specifying axis (``mapper, axis=0`` is equivalent to ``index=mapper``). columns : dict-like or function Alternative to specifying axis (``mapper, axis=1`` is equivalent to ``columns=mapper``). axis : int or str Axis to target with ``mapper``. Can be either the axis name ('index', 'columns') or number (0, 1). The default is 'index'. copy : bool, default True Also copy underlying data. inplace : bool, default False Whether to return a new DataFrame. If True then value of copy is ignored. level : int or level name, default None In case of a MultiIndex, only rename labels in the specified level. errors : {'ignore', 'raise'}, default 'ignore' If 'raise', raise a `KeyError` when a dict-like `mapper`, `index`, or `columns` contains labels that are not present in the Index being transformed. If 'ignore', existing keys will be renamed and extra keys will be ignored. Returns ------- DataFrame DataFrame with the renamed axis labels. Raises ------ KeyError If any of the labels is not found in the selected axis and "errors='raise'". See Also -------- DataFrame.rename_axis : Set the name of the axis. Examples -------- ``DataFrame.rename`` supports two calling conventions * ``(index=index_mapper, columns=columns_mapper, ...)`` * ``(mapper, axis={'index', 'columns'}, ...)`` We *highly* recommend using keyword arguments to clarify your intent. 
Rename columns using a mapping: >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]}) >>> df.rename(columns={"A": "a", "B": "c"}) a c 0 1 4 1 2 5 2 3 6 Rename index using a mapping: >>> df.rename(index={0: "x", 1: "y", 2: "z"}) A B x 1 4 y 2 5 z 3 6 Cast index labels to a different type: >>> df.index RangeIndex(start=0, stop=3, step=1) >>> df.rename(index=str).index Index(['0', '1', '2'], dtype='object') >>> df.rename(columns={"A": "a", "B": "b", "C": "c"}, errors="raise") Traceback (most recent call last): KeyError: ['C'] not found in axis Using axis-style parameters >>> df.rename(str.lower, axis='columns') a b 0 1 4 1 2 5 2 3 6 >>> df.rename({1: 2, 2: 4}, axis='index') A B 0 1 4 2 2 5 4 3 6 """ axes = validate_axis_style_args(self, args, kwargs, 'mapper', 'rename') kwargs.update(axes) # Pop these, since the values are in `kwargs` under different names kwargs.pop('axis', None) kwargs.pop('mapper', None) return super().rename(**kwargs) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.fillna.__doc__) def fillna(self, value=None, method=None, axis=None, inplace=False, limit=None, downcast=None, **kwargs): return super().fillna(value=value, method=method, axis=axis, inplace=inplace, limit=limit, downcast=downcast, **kwargs) @Appender(_shared_docs['replace'] % _shared_doc_kwargs) def replace(self, to_replace=None, value=None, inplace=False, limit=None, regex=False, method='pad'): return super().replace(to_replace=to_replace, value=value, inplace=inplace, limit=limit, regex=regex, method=method) @Appender(_shared_docs['shift'] % _shared_doc_kwargs) def shift(self, periods=1, freq=None, axis=0, fill_value=None): return super().shift(periods=periods, freq=freq, axis=axis, fill_value=fill_value) def set_index(self, keys, drop=True, append=False, inplace=False, verify_integrity=False): """ Set the DataFrame index using existing columns. Set the DataFrame index (row labels) using one or more existing columns or arrays (of the correct length). The index can replace the existing index or expand on it. Parameters ---------- keys : label or array-like or list of labels/arrays This parameter can be either a single column key, a single array of the same length as the calling DataFrame, or a list containing an arbitrary combination of column keys and arrays. Here, "array" encompasses :class:`Series`, :class:`Index`, ``np.ndarray``, and instances of :class:`~collections.abc.Iterator`. drop : bool, default True Delete columns to be used as the new index. append : bool, default False Whether to append columns to existing index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). verify_integrity : bool, default False Check the new index for duplicates. Otherwise defer the check until necessary. Setting to False will improve the performance of this method. Returns ------- DataFrame Changed row labels. See Also -------- DataFrame.reset_index : Opposite of set_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame({'month': [1, 4, 7, 10], ... 'year': [2012, 2014, 2013, 2014], ... 
'sale': [55, 40, 84, 31]}) >>> df month year sale 0 1 2012 55 1 4 2014 40 2 7 2013 84 3 10 2014 31 Set the index to become the 'month' column: >>> df.set_index('month') year sale month 1 2012 55 4 2014 40 7 2013 84 10 2014 31 Create a MultiIndex using columns 'year' and 'month': >>> df.set_index(['year', 'month']) sale year month 2012 1 55 2014 4 40 2013 7 84 2014 10 31 Create a MultiIndex using an Index and a column: >>> df.set_index([pd.Index([1, 2, 3, 4]), 'year']) month sale year 1 2012 1 55 2 2014 4 40 3 2013 7 84 4 2014 10 31 Create a MultiIndex using two Series: >>> s = pd.Series([1, 2, 3, 4]) >>> df.set_index([s, s**2]) month year sale 1 1 1 2012 55 2 4 4 2014 40 3 9 7 2013 84 4 16 10 2014 31 """ inplace = validate_bool_kwarg(inplace, 'inplace') if not isinstance(keys, list): keys = [keys] err_msg = ('The parameter "keys" may be a column key, one-dimensional ' 'array, or a list containing only valid column keys and ' 'one-dimensional arrays.') missing = [] for col in keys: if isinstance(col, (ABCIndexClass, ABCSeries, np.ndarray, list, abc.Iterator)): # arrays are fine as long as they are one-dimensional # iterators get converted to list below if getattr(col, 'ndim', 1) != 1: raise ValueError(err_msg) else: # everything else gets tried as a key; see GH 24969 try: found = col in self.columns except TypeError: raise TypeError(err_msg + ' Received column of ' 'type {}'.format(type(col))) else: if not found: missing.append(col) if missing: raise KeyError('None of {} are in the columns'.format(missing)) if inplace: frame = self else: frame = self.copy() arrays = [] names = [] if append: names = [x for x in self.index.names] if isinstance(self.index, ABCMultiIndex): for i in range(self.index.nlevels): arrays.append(self.index._get_level_values(i)) else: arrays.append(self.index) to_remove = [] for col in keys: if isinstance(col, ABCMultiIndex): for n in range(col.nlevels): arrays.append(col._get_level_values(n)) names.extend(col.names) elif isinstance(col, (ABCIndexClass, ABCSeries)): # if Index then not MultiIndex (treated above) arrays.append(col) names.append(col.name) elif isinstance(col, (list, np.ndarray)): arrays.append(col) names.append(None) elif isinstance(col, abc.Iterator): arrays.append(list(col)) names.append(None) # from here, col can only be a column label else: arrays.append(frame[col]._values) names.append(col) if drop: to_remove.append(col) if len(arrays[-1]) != len(self): # check newest element against length of calling frame, since # ensure_index_from_sequences would not raise for append=False. raise ValueError('Length mismatch: Expected {len_self} rows, ' 'received array of length {len_col}'.format( len_self=len(self), len_col=len(arrays[-1]) )) index = ensure_index_from_sequences(arrays, names) if verify_integrity and not index.is_unique: duplicates = index[index.duplicated()].unique() raise ValueError('Index has duplicate keys: {dup}'.format( dup=duplicates)) # use set to handle duplicate column names gracefully in case of drop for c in set(to_remove): del frame[c] # clear up memory usage index._cleanup() frame.index = index if not inplace: return frame def reset_index(self, level=None, drop=False, inplace=False, col_level=0, col_fill=''): """ Reset the index, or a level of it. Reset the index of the DataFrame, and use the default one instead. If the DataFrame has a MultiIndex, this method can remove one or more levels. Parameters ---------- level : int, str, tuple, or list, default None Only remove the given levels from the index. Removes all levels by default. 
drop : bool, default False Do not try to insert index into dataframe columns. This resets the index to the default integer index. inplace : bool, default False Modify the DataFrame in place (do not create a new object). col_level : int or str, default 0 If the columns have multiple levels, determines which level the labels are inserted into. By default it is inserted into the first level. col_fill : object, default '' If the columns have multiple levels, determines how the other levels are named. If None then the index name is repeated. Returns ------- DataFrame DataFrame with the new index. See Also -------- DataFrame.set_index : Opposite of reset_index. DataFrame.reindex : Change to new indices or expand indices. DataFrame.reindex_like : Change to same indices as other DataFrame. Examples -------- >>> df = pd.DataFrame([('bird', 389.0), ... ('bird', 24.0), ... ('mammal', 80.5), ... ('mammal', np.nan)], ... index=['falcon', 'parrot', 'lion', 'monkey'], ... columns=('class', 'max_speed')) >>> df class max_speed falcon bird 389.0 parrot bird 24.0 lion mammal 80.5 monkey mammal NaN When we reset the index, the old index is added as a column, and a new sequential index is used: >>> df.reset_index() index class max_speed 0 falcon bird 389.0 1 parrot bird 24.0 2 lion mammal 80.5 3 monkey mammal NaN We can use the `drop` parameter to avoid the old index being added as a column: >>> df.reset_index(drop=True) class max_speed 0 bird 389.0 1 bird 24.0 2 mammal 80.5 3 mammal NaN You can also use `reset_index` with `MultiIndex`. >>> index = pd.MultiIndex.from_tuples([('bird', 'falcon'), ... ('bird', 'parrot'), ... ('mammal', 'lion'), ... ('mammal', 'monkey')], ... names=['class', 'name']) >>> columns = pd.MultiIndex.from_tuples([('speed', 'max'), ... ('species', 'type')]) >>> df = pd.DataFrame([(389.0, 'fly'), ... ( 24.0, 'fly'), ... ( 80.5, 'run'), ... (np.nan, 'jump')], ... index=index, ... columns=columns) >>> df speed species max type class name bird falcon 389.0 fly parrot 24.0 fly mammal lion 80.5 run monkey NaN jump If the index has multiple levels, we can reset a subset of them: >>> df.reset_index(level='class') class speed species max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we are not dropping the index, by default, it is placed in the top level. 
We can place it in another level: >>> df.reset_index(level='class', col_level=1) speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump When the index is inserted under another level, we can specify under which one with the parameter `col_fill`: >>> df.reset_index(level='class', col_level=1, col_fill='species') species speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump If we specify a nonexistent level for `col_fill`, it is created: >>> df.reset_index(level='class', col_level=1, col_fill='genus') genus speed species class max type name falcon bird 389.0 fly parrot bird 24.0 fly lion mammal 80.5 run monkey mammal NaN jump """ inplace = validate_bool_kwarg(inplace, 'inplace') if inplace: new_obj = self else: new_obj = self.copy() def _maybe_casted_values(index, labels=None): values = index._values if not isinstance(index, (PeriodIndex, DatetimeIndex)): if values.dtype == np.object_: values = lib.maybe_convert_objects(values) # if we have the labels, extract the values with a mask if labels is not None: mask = labels == -1 # we can have situations where the whole mask is -1, # meaning there is nothing found in labels, so make all nan's if mask.all(): values = np.empty(len(mask)) values.fill(np.nan) else: values = values.take(labels) # TODO(https://github.com/pandas-dev/pandas/issues/24206) # Push this into maybe_upcast_putmask? # We can't pass EAs there right now. Looks a bit # complicated. # So we unbox the ndarray_values, op, re-box. values_type = type(values) values_dtype = values.dtype if issubclass(values_type, DatetimeLikeArray): values = values._data if mask.any(): values, changed = maybe_upcast_putmask( values, mask, np.nan) if issubclass(values_type, DatetimeLikeArray): values = values_type(values, dtype=values_dtype) return values new_index = ibase.default_index(len(new_obj)) if level is not None: if not isinstance(level, (tuple, list)): level = [level] level = [self.index._get_level_number(lev) for lev in level] if len(level) < self.index.nlevels: new_index = self.index.droplevel(level) if not drop: if isinstance(self.index, MultiIndex): names = [n if n is not None else ('level_%d' % i) for (i, n) in enumerate(self.index.names)] to_insert = zip(self.index.levels, self.index.codes) else: default = 'index' if 'index' not in self else 'level_0' names = ([default] if self.index.name is None else [self.index.name]) to_insert = ((self.index, None),) multi_col = isinstance(self.columns, MultiIndex) for i, (lev, lab) in reversed(list(enumerate(to_insert))): if not (level is None or i in level): continue name = names[i] if multi_col: col_name = (list(name) if isinstance(name, tuple) else [name]) if col_fill is None: if len(col_name) not in (1, self.columns.nlevels): raise ValueError("col_fill=None is incompatible " "with incomplete column name " "{}".format(name)) col_fill = col_name[0] lev_num = self.columns._get_level_number(col_level) name_lst = [col_fill] * lev_num + col_name missing = self.columns.nlevels - len(name_lst) name_lst += [col_fill] * missing name = tuple(name_lst) # to ndarray and maybe infer different dtype level_values = _maybe_casted_values(lev, lab) new_obj.insert(0, name, level_values) new_obj.index = new_index if not inplace: return new_obj # ---------------------------------------------------------------------- # Reindex-based selection methods @Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isna(self): return super().isna() 
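
    # ``isnull`` is retained as an alias of ``isna`` (likewise ``notnull``
    # for ``notna``).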
@Appender(_shared_docs['isna'] % _shared_doc_kwargs) def isnull(self): return super().isnull() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notna(self): return super().notna() @Appender(_shared_docs['notna'] % _shared_doc_kwargs) def notnull(self): return super().notnull() def dropna(self, axis=0, how='any', thresh=None, subset=None, inplace=False): """ Remove missing values. See the :ref:`User Guide <missing_data>` for more on which values are considered missing, and how to work with missing data. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 Determine if rows or columns which contain missing values are removed. * 0, or 'index' : Drop rows which contain missing values. * 1, or 'columns' : Drop columns which contain missing value. .. deprecated:: 0.23.0 Pass tuple or list to drop on multiple axes. Only a single axis is allowed. how : {'any', 'all'}, default 'any' Determine if row or column is removed from DataFrame, when we have at least one NA or all NA. * 'any' : If any NA values are present, drop that row or column. * 'all' : If all values are NA, drop that row or column. thresh : int, optional Require that many non-NA values. subset : array-like, optional Labels along other axis to consider, e.g. if you are dropping rows these would be a list of columns to include. inplace : bool, default False If True, do operation inplace and return None. Returns ------- DataFrame DataFrame with NA entries dropped from it. See Also -------- DataFrame.isna: Indicate missing values. DataFrame.notna : Indicate existing (non-missing) values. DataFrame.fillna : Replace missing values. Series.dropna : Drop missing values. Index.dropna : Drop missing indices. Examples -------- >>> df = pd.DataFrame({"name": ['Alfred', 'Batman', 'Catwoman'], ... "toy": [np.nan, 'Batmobile', 'Bullwhip'], ... "born": [pd.NaT, pd.Timestamp("1940-04-25"), ... pd.NaT]}) >>> df name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Drop the rows where at least one element is missing. >>> df.dropna() name toy born 1 Batman Batmobile 1940-04-25 Drop the columns where at least one element is missing. >>> df.dropna(axis='columns') name 0 Alfred 1 Batman 2 Catwoman Drop the rows where all elements are missing. >>> df.dropna(how='all') name toy born 0 Alfred NaN NaT 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Keep only the rows with at least 2 non-NA values. >>> df.dropna(thresh=2) name toy born 1 Batman Batmobile 1940-04-25 2 Catwoman Bullwhip NaT Define in which columns to look for missing values. >>> df.dropna(subset=['name', 'born']) name toy born 1 Batman Batmobile 1940-04-25 Keep the DataFrame with valid entries in the same variable. 
>>> df.dropna(inplace=True) >>> df name toy born 1 Batman Batmobile 1940-04-25 """ inplace = validate_bool_kwarg(inplace, 'inplace') if isinstance(axis, (tuple, list)): # GH20987 msg = ("supplying multiple axes to axis is deprecated and " "will be removed in a future version.") warnings.warn(msg, FutureWarning, stacklevel=2) result = self for ax in axis: result = result.dropna(how=how, thresh=thresh, subset=subset, axis=ax) else: axis = self._get_axis_number(axis) agg_axis = 1 - axis agg_obj = self if subset is not None: ax = self._get_axis(agg_axis) indices = ax.get_indexer_for(subset) check = indices == -1 if check.any(): raise KeyError(list(np.compress(check, subset))) agg_obj = self.take(indices, axis=agg_axis) count = agg_obj.count(axis=agg_axis) if thresh is not None: mask = count >= thresh elif how == 'any': mask = count == len(agg_obj._get_axis(agg_axis)) elif how == 'all': mask = count > 0 else: if how is not None: raise ValueError('invalid how option: {h}'.format(h=how)) else: raise TypeError('must specify how or thresh') result = self.loc(axis=axis)[mask] if inplace: self._update_inplace(result) else: return result def drop_duplicates(self, subset=None, keep='first', inplace=False): """ Return DataFrame with duplicate rows removed, optionally only considering certain columns. Indexes, including time indexes are ignored. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Drop duplicates except for the first occurrence. - ``last`` : Drop duplicates except for the last occurrence. - False : Drop all duplicates. inplace : boolean, default False Whether to drop duplicates in place or to return a copy Returns ------- DataFrame """ if self.empty: return self.copy() inplace = validate_bool_kwarg(inplace, 'inplace') duplicated = self.duplicated(subset, keep=keep) if inplace: inds, = (-duplicated)._ndarray_values.nonzero() new_data = self._data.take(inds) self._update_inplace(new_data) else: return self[-duplicated] def duplicated(self, subset=None, keep='first'): """ Return boolean Series denoting duplicate rows, optionally only considering certain columns. Parameters ---------- subset : column label or sequence of labels, optional Only consider certain columns for identifying duplicates, by default use all of the columns keep : {'first', 'last', False}, default 'first' - ``first`` : Mark duplicates as ``True`` except for the first occurrence. - ``last`` : Mark duplicates as ``True`` except for the last occurrence. - False : Mark all duplicates as ``True``. Returns ------- Series """ from pandas.core.sorting import get_group_index from pandas._libs.hashtable import duplicated_int64, _SIZE_HINT_LIMIT if self.empty: return Series(dtype=bool) def f(vals): labels, shape = algorithms.factorize( vals, size_hint=min(len(self), _SIZE_HINT_LIMIT)) return labels.astype('i8', copy=False), len(shape) if subset is None: subset = self.columns elif (not np.iterable(subset) or isinstance(subset, str) or isinstance(subset, tuple) and subset in self.columns): subset = subset, # Verify all columns in subset exist in the queried dataframe # Otherwise, raise a KeyError, same as if you try to __getitem__ with a # key that doesn't exist. 
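        # Index.difference yields the labels in ``subset`` that are missing
        # from the frame's columns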
diff = Index(subset).difference(self.columns) if not diff.empty: raise KeyError(diff) vals = (col.values for name, col in self.iteritems() if name in subset) labels, shape = map(list, zip(*map(f, vals))) ids = get_group_index(labels, shape, sort=False, xnull=False) return Series(duplicated_int64(ids, keep), index=self.index) # ---------------------------------------------------------------------- # Sorting @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_values.__doc__) def sort_values(self, by, axis=0, ascending=True, inplace=False, kind='quicksort', na_position='last'): inplace = validate_bool_kwarg(inplace, 'inplace') axis = self._get_axis_number(axis) if not isinstance(by, list): by = [by] if is_sequence(ascending) and len(by) != len(ascending): raise ValueError('Length of ascending (%d) != length of by (%d)' % (len(ascending), len(by))) if len(by) > 1: from pandas.core.sorting import lexsort_indexer keys = [self._get_label_or_level_values(x, axis=axis) for x in by] indexer = lexsort_indexer(keys, orders=ascending, na_position=na_position) indexer = ensure_platform_int(indexer) else: from pandas.core.sorting import nargsort by = by[0] k = self._get_label_or_level_values(by, axis=axis) if isinstance(ascending, (tuple, list)): ascending = ascending[0] indexer = nargsort(k, kind=kind, ascending=ascending, na_position=na_position) new_data = self._data.take(indexer, axis=self._get_block_manager_axis(axis), verify=False) if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) @Substitution(**_shared_doc_kwargs) @Appender(NDFrame.sort_index.__doc__) def sort_index(self, axis=0, level=None, ascending=True, inplace=False, kind='quicksort', na_position='last', sort_remaining=True, by=None): # TODO: this can be combined with Series.sort_index impl as # almost identical inplace = validate_bool_kwarg(inplace, 'inplace') # 10726 if by is not None: warnings.warn("by argument to sort_index is deprecated, " "please use .sort_values(by=...)", FutureWarning, stacklevel=2) if level is not None: raise ValueError("unable to simultaneously sort by and level") return self.sort_values(by, axis=axis, ascending=ascending, inplace=inplace) axis = self._get_axis_number(axis) labels = self._get_axis(axis) # make sure that the axis is lexsorted to start # if not we need to reconstruct to get the correct indexer labels = labels._sort_levels_monotonic() if level is not None: new_axis, indexer = labels.sortlevel(level, ascending=ascending, sort_remaining=sort_remaining) elif isinstance(labels, MultiIndex): from pandas.core.sorting import lexsort_indexer indexer = lexsort_indexer(labels._get_codes_for_sorting(), orders=ascending, na_position=na_position) else: from pandas.core.sorting import nargsort # Check monotonic-ness before sort an index # GH11080 if ((ascending and labels.is_monotonic_increasing) or (not ascending and labels.is_monotonic_decreasing)): if inplace: return else: return self.copy() indexer = nargsort(labels, kind=kind, ascending=ascending, na_position=na_position) baxis = self._get_block_manager_axis(axis) new_data = self._data.take(indexer, axis=baxis, verify=False) # reconstruct axis if needed new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic() if inplace: return self._update_inplace(new_data) else: return self._constructor(new_data).__finalize__(self) def nlargest(self, n, columns, keep='first'): """ Return the first `n` rows ordered by `columns` in descending order. 
        Return the first `n` rows with the largest values in `columns`, in
        descending order. The columns that are not specified are returned as
        well, but not used for ordering.

        This method is equivalent to
        ``df.sort_values(columns, ascending=False).head(n)``, but more
        performant.

        Parameters
        ----------
        n : int
            Number of rows to return.
        columns : label or list of labels
            Column label(s) to order by.
        keep : {'first', 'last', 'all'}, default 'first'
            Where there are duplicate values:

            - ``first`` : prioritize the first occurrence(s)
            - ``last`` : prioritize the last occurrence(s)
            - ``all`` : do not drop any duplicates, even if it means
              selecting more than `n` items.

            .. versionadded:: 0.24.0

        Returns
        -------
        DataFrame
            The first `n` rows ordered by the given columns in descending
            order.

        See Also
        --------
        DataFrame.nsmallest : Return the first `n` rows ordered by `columns`
            in ascending order.
        DataFrame.sort_values : Sort DataFrame by the values.
        DataFrame.head : Return the first `n` rows without re-ordering.

        Notes
        -----
        This function cannot be used with all column types. For example, when
        specifying columns with `object` or `category` dtypes, ``TypeError``
        is raised.

        Examples
        --------
        >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
        ...                                   434000, 434000, 337000, 11300,
        ...                                   11300, 11300],
        ...                    'GDP': [1937894, 2583560, 12011, 4520, 12128,
        ...                            17036, 182, 38, 311],
        ...                    'alpha-2': ["IT", "FR", "MT", "MV", "BN",
        ...                                "IS", "NR", "TV", "AI"]},
        ...                   index=["Italy", "France", "Malta",
        ...                          "Maldives", "Brunei", "Iceland",
        ...                          "Nauru", "Tuvalu", "Anguilla"])
        >>> df
                  population      GDP alpha-2
        Italy       59000000  1937894      IT
        France      65000000  2583560      FR
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN
        Iceland       337000    17036      IS
        Nauru          11300      182      NR
        Tuvalu         11300       38      TV
        Anguilla       11300      311      AI

        In the following example, we will use ``nlargest`` to select the
        three rows having the largest values in column "population".

        >>> df.nlargest(3, 'population')
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Malta       434000    12011      MT

        When using ``keep='last'``, ties are resolved in reverse order:

        >>> df.nlargest(3, 'population', keep='last')
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Brunei      434000    12128      BN

        When using ``keep='all'``, all duplicate items are maintained:

        >>> df.nlargest(3, 'population', keep='all')
                  population      GDP alpha-2
        France      65000000  2583560      FR
        Italy       59000000  1937894      IT
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN

        To order by the largest values in column "population" and then "GDP",
        we can specify multiple columns like in the next example.

        >>> df.nlargest(3, ['population', 'GDP'])
                population      GDP alpha-2
        France    65000000  2583560      FR
        Italy     59000000  1937894      IT
        Brunei      434000    12128      BN
        """
        return algorithms.SelectNFrame(self,
                                       n=n,
                                       keep=keep,
                                       columns=columns).nlargest()

    def nsmallest(self, n, columns, keep='first'):
        """
        Return the first `n` rows ordered by `columns` in ascending order.

        Return the first `n` rows with the smallest values in `columns`, in
        ascending order. The columns that are not specified are returned as
        well, but not used for ordering.

        This method is equivalent to
        ``df.sort_values(columns, ascending=True).head(n)``, but more
        performant.

        Parameters
        ----------
        n : int
            Number of items to retrieve.
        columns : list or str
            Column name or names to order by.
        keep : {'first', 'last', 'all'}, default 'first'
            Where there are duplicate values:

            - ``first`` : take the first occurrence.
            - ``last`` : take the last occurrence.
            - ``all`` : do not drop any duplicates, even if it means
              selecting more than `n` items.

            .. versionadded:: 0.24.0

        Returns
        -------
        DataFrame

        See Also
        --------
        DataFrame.nlargest : Return the first `n` rows ordered by `columns`
            in descending order.
        DataFrame.sort_values : Sort DataFrame by the values.
        DataFrame.head : Return the first `n` rows without re-ordering.

        Examples
        --------
        >>> df = pd.DataFrame({'population': [59000000, 65000000, 434000,
        ...                                   434000, 434000, 337000, 11300,
        ...                                   11300, 11300],
        ...                    'GDP': [1937894, 2583560, 12011, 4520, 12128,
        ...                            17036, 182, 38, 311],
        ...                    'alpha-2': ["IT", "FR", "MT", "MV", "BN",
        ...                                "IS", "NR", "TV", "AI"]},
        ...                   index=["Italy", "France", "Malta",
        ...                          "Maldives", "Brunei", "Iceland",
        ...                          "Nauru", "Tuvalu", "Anguilla"])
        >>> df
                  population      GDP alpha-2
        Italy       59000000  1937894      IT
        France      65000000  2583560      FR
        Malta         434000    12011      MT
        Maldives      434000     4520      MV
        Brunei        434000    12128      BN
        Iceland       337000    17036      IS
        Nauru          11300      182      NR
        Tuvalu         11300       38      TV
        Anguilla       11300      311      AI

        In the following example, we will use ``nsmallest`` to select the
        three rows having the smallest values in column "population".

        >>> df.nsmallest(3, 'population')
                  population  GDP alpha-2
        Nauru          11300  182      NR
        Tuvalu         11300   38      TV
        Anguilla       11300  311      AI

        When using ``keep='last'``, ties are resolved in reverse order:

        >>> df.nsmallest(3, 'population', keep='last')
                  population  GDP alpha-2
        Anguilla       11300  311      AI
        Tuvalu         11300   38      TV
        Nauru          11300  182      NR

        When using ``keep='all'``, all duplicate items are maintained:

        >>> df.nsmallest(3, 'population', keep='all')
                  population  GDP alpha-2
        Nauru          11300  182      NR
        Tuvalu         11300   38      TV
        Anguilla       11300  311      AI

        To order by the smallest values in column "population" and then
        "GDP", we can specify multiple columns like in the next example.

        >>> df.nsmallest(3, ['population', 'GDP'])
                  population  GDP alpha-2
        Tuvalu         11300   38      TV
        Nauru          11300  182      NR
        Anguilla       11300  311      AI
        """
        return algorithms.SelectNFrame(self,
                                       n=n,
                                       keep=keep,
                                       columns=columns).nsmallest()

    def swaplevel(self, i=-2, j=-1, axis=0):
        """
        Swap levels i and j in a MultiIndex on a particular axis.

        Parameters
        ----------
        i, j : int, string (can be mixed)
            Level of index to be swapped. Can pass level name as string.
        axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis on which to swap levels.

        Returns
        -------
        DataFrame

        .. versionchanged:: 0.18.1

           The indexes ``i`` and ``j`` are now optional, and default to
           the two innermost levels of the index.
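
        Examples
        --------
        A minimal illustration with a hypothetical two-level index; with the
        default ``i`` and ``j``, the two innermost levels are swapped:

        >>> idx = pd.MultiIndex.from_arrays(
        ...     [['a', 'b'], [1, 2]], names=['letter', 'number'])
        >>> df = pd.DataFrame({'x': [10, 20]}, index=idx)
        >>> df.swaplevel().index.names
        FrozenList(['number', 'letter'])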
        """
        result = self.copy()

        axis = self._get_axis_number(axis)
        if axis == 0:
            result.index = result.index.swaplevel(i, j)
        else:
            result.columns = result.columns.swaplevel(i, j)
        return result

    def reorder_levels(self, order, axis=0):
        """
        Rearrange index levels using input order. May not drop or
        duplicate levels.

        Parameters
        ----------
        order : list of int or list of str
            List representing new level order. Reference level by number
            (position) or by key (label).
        axis : int
            Where to reorder levels.

        Returns
        -------
        type of caller (new object)
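
        Examples
        --------
        A minimal illustration with a hypothetical two-level index:

        >>> idx = pd.MultiIndex.from_arrays(
        ...     [['a', 'a'], [1, 2]], names=['letter', 'number'])
        >>> df = pd.DataFrame({'x': [10, 20]}, index=idx)
        >>> df.reorder_levels(['number', 'letter']).index.names
        FrozenList(['number', 'letter'])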
        """
        axis = self._get_axis_number(axis)

        if not isinstance(self._get_axis(axis),
                          MultiIndex):  # pragma: no cover
            raise TypeError('Can only reorder levels on a hierarchical '
                            'axis.')

        result = self.copy()

        if axis == 0:
            result.index = result.index.reorder_levels(order)
        else:
            result.columns = result.columns.reorder_levels(order)
        return result

    # ----------------------------------------------------------------------
    # Arithmetic / combination related

    def _combine_frame(self, other, func, fill_value=None, level=None):
        this, other = self.align(other, join='outer', level=level,
                                 copy=False)
        new_index, new_columns = this.index, this.columns

        def _arith_op(left, right):
            # for the mixed_type case where we iterate over columns,
            # _arith_op(left, right) is equivalent to
            # left._binop(right, func, fill_value=fill_value)
            left, right = ops.fill_binop(left, right, fill_value)
            return func(left, right)

        if ops.should_series_dispatch(this, other, func):
            # iterate over columns
            return ops.dispatch_to_series(this, other, _arith_op)
        else:
            result = _arith_op(this.values, other.values)
            return self._constructor(result,
                                     index=new_index, columns=new_columns,
                                     copy=False)

    def _combine_match_index(self, other, func, level=None):
        left, right = self.align(other, join='outer', axis=0, level=level,
                                 copy=False)
        assert left.index.equals(right.index)

        if left._is_mixed_type or right._is_mixed_type:
            # operate column-wise; avoid costly object-casting in `.values`
            return ops.dispatch_to_series(left, right, func)
        else:
            # fastpath --> operate directly on values
            with np.errstate(all="ignore"):
                new_data = func(left.values.T, right.values).T
            return self._constructor(new_data,
                                     index=left.index, columns=self.columns,
                                     copy=False)

    def _combine_match_columns(self, other, func, level=None):
        assert isinstance(other, Series)
        left, right = self.align(other, join='outer', axis=1, level=level,
                                 copy=False)
        assert left.columns.equals(right.index)
        return ops.dispatch_to_series(left, right, func, axis="columns")

    def _combine_const(self, other, func):
        assert lib.is_scalar(other) or np.ndim(other) == 0
        return ops.dispatch_to_series(self, other, func)

    def combine(self, other, func, fill_value=None, overwrite=True):
        """
        Perform column-wise combine with another DataFrame.

        Combines a DataFrame with `other` DataFrame using `func`
        to element-wise combine columns. The row and column indexes of the
        resulting DataFrame will be the union of the two.

        Parameters
        ----------
        other : DataFrame
            The DataFrame to merge column-wise.
        func : function
            Function that takes two series as inputs and returns a Series
            or a scalar. Used to merge the two dataframes column by column.
        fill_value : scalar value, default None
            The value to fill NaNs with prior to passing any column to the
            merge func.
        overwrite : bool, default True
            If True, columns in `self` that do not exist in `other` will be
            overwritten with NaNs.

        Returns
        -------
        DataFrame
            Combination of the provided DataFrames.

        See Also
        --------
        DataFrame.combine_first : Combine two DataFrame objects and default
            to non-null values in frame calling the method.

        Examples
        --------
        Combine using a simple function that chooses the smaller column.

        >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
        >>> take_smaller = lambda s1, s2: s1 if s1.sum() < s2.sum() else s2
        >>> df1.combine(df2, take_smaller)
           A  B
        0  0  3
        1  0  3

        Example using a true element-wise combine function.
>>> df1 = pd.DataFrame({'A': [5, 0], 'B': [2, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine(df2, np.minimum) A B 0 1 2 1 0 3 Using `fill_value` fills Nones prior to passing the column to the merge function. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]}) >>> df1.combine(df2, take_smaller, fill_value=-5) A B 0 0 -5.0 1 0 4.0 However, if the same element in both dataframes is None, that None is preserved >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [None, 4]}) >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [None, 3]}) >>> df1.combine(df2, take_smaller, fill_value=-5) A B 0 0 -5.0 1 0 3.0 Example that demonstrates the use of `overwrite` and behavior when the axis differ between the dataframes. >>> df1 = pd.DataFrame({'A': [0, 0], 'B': [4, 4]}) >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [-10, 1], }, index=[1, 2]) >>> df1.combine(df2, take_smaller) A B C 0 NaN NaN NaN 1 NaN 3.0 -10.0 2 NaN 3.0 1.0 >>> df1.combine(df2, take_smaller, overwrite=False) A B C 0 0.0 NaN NaN 1 0.0 3.0 -10.0 2 NaN 3.0 1.0 Demonstrating the preference of the passed in dataframe. >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1], }, index=[1, 2]) >>> df2.combine(df1, take_smaller) A B C 0 0.0 NaN NaN 1 0.0 3.0 NaN 2 NaN 3.0 NaN >>> df2.combine(df1, take_smaller, overwrite=False) A B C 0 0.0 NaN NaN 1 0.0 3.0 1.0 2 NaN 3.0 1.0 """ other_idxlen = len(other.index) # save for compare this, other = self.align(other, copy=False) new_index = this.index if other.empty and len(new_index) == len(self.index): return self.copy() if self.empty and len(other) == other_idxlen: return other.copy() # sorts if possible new_columns = this.columns.union(other.columns) do_fill = fill_value is not None result = {} for col in new_columns: series = this[col] otherSeries = other[col] this_dtype = series.dtype other_dtype = otherSeries.dtype this_mask = isna(series) other_mask = isna(otherSeries) # don't overwrite columns unnecessarily # DO propagate if this column is not in the intersection if not overwrite and other_mask.all(): result[col] = this[col].copy() continue if do_fill: series = series.copy() otherSeries = otherSeries.copy() series[this_mask] = fill_value otherSeries[other_mask] = fill_value if col not in self.columns: # If self DataFrame does not have col in other DataFrame, # try to promote series, which is all NaN, as other_dtype. new_dtype = other_dtype try: series = series.astype(new_dtype, copy=False) except ValueError: # e.g. new_dtype is integer types pass else: # if we have different dtypes, possibly promote new_dtype = find_common_type([this_dtype, other_dtype]) if not is_dtype_equal(this_dtype, new_dtype): series = series.astype(new_dtype) if not is_dtype_equal(other_dtype, new_dtype): otherSeries = otherSeries.astype(new_dtype) arr = func(series, otherSeries) arr = maybe_downcast_to_dtype(arr, this_dtype) result[col] = arr # convert_objects just in case return self._constructor(result, index=new_index, columns=new_columns) def combine_first(self, other): """ Update null elements with value in the same location in `other`. Combine two DataFrame objects by filling null values in one DataFrame with non-null values from other DataFrame. The row and column indexes of the resulting DataFrame will be the union of the two. Parameters ---------- other : DataFrame Provided DataFrame to use to fill null values. Returns ------- DataFrame See Also -------- DataFrame.combine : Perform series-wise operation on two DataFrames using a given function. 
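
        Notes
        -----
        The result's dtypes follow the usual alignment and upcasting rules:
        for example, integer columns that acquire missing values in the
        combined result become float, as in the examples below.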
        Examples
        --------
        >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [None, 4]})
        >>> df2 = pd.DataFrame({'A': [1, 1], 'B': [3, 3]})
        >>> df1.combine_first(df2)
             A    B
        0  1.0  3.0
        1  0.0  4.0

        Null values still persist if the location of that null value
        does not exist in `other`.

        >>> df1 = pd.DataFrame({'A': [None, 0], 'B': [4, None]})
        >>> df2 = pd.DataFrame({'B': [3, 3], 'C': [1, 1]}, index=[1, 2])
        >>> df1.combine_first(df2)
             A    B    C
        0  NaN  4.0  NaN
        1  0.0  3.0  1.0
        2  NaN  3.0  1.0
        """
        import pandas.core.computation.expressions as expressions

        def extract_values(arr):
            # Does two things:
            # 1. maybe gets the values from the Series / Index
            # 2. convert datelike to i8
            if isinstance(arr, (ABCIndexClass, ABCSeries)):
                arr = arr._values

            if needs_i8_conversion(arr):
                if is_extension_array_dtype(arr.dtype):
                    arr = arr.asi8
                else:
                    arr = arr.view('i8')
            return arr

        def combiner(x, y):
            mask = isna(x)
            if isinstance(mask, (ABCIndexClass, ABCSeries)):
                mask = mask._values

            x_values = extract_values(x)
            y_values = extract_values(y)

            # If the column y in other DataFrame is not in first DataFrame,
            # just return y_values.
            if y.name not in self.columns:
                return y_values

            return expressions.where(mask, y_values, x_values)

        return self.combine(other, combiner, overwrite=False)

    @deprecate_kwarg(old_arg_name='raise_conflict', new_arg_name='errors',
                     mapping={False: 'ignore', True: 'raise'})
    def update(self, other, join='left', overwrite=True, filter_func=None,
               errors='ignore'):
        """
        Modify in place using non-NA values from another DataFrame.

        Aligns on indices. There is no return value.

        Parameters
        ----------
        other : DataFrame, or object coercible into a DataFrame
            Should have at least one matching index/column label
            with the original DataFrame. If a Series is passed,
            its name attribute must be set, and that will be
            used as the column name to align with the original DataFrame.
        join : {'left'}, default 'left'
            Only left join is implemented, keeping the index and columns of
            the original object.
        overwrite : bool, default True
            How to handle non-NA values for overlapping keys:

            * True: overwrite original DataFrame's values
              with values from `other`.
            * False: only update values that are NA in
              the original DataFrame.

        filter_func : callable(1d-array) -> bool 1d-array, optional
            Can choose to replace values other than NA. Return True for
            values that should be updated.
        errors : {'raise', 'ignore'}, default 'ignore'
            If 'raise', will raise a ValueError if the DataFrame and `other`
            both contain non-NA data in the same place.

            .. versionchanged :: 0.24.0
               Changed from `raise_conflict=False|True`
               to `errors='ignore'|'raise'`.

        Returns
        -------
        None : method directly changes calling object

        Raises
        ------
        ValueError
            * When `errors='raise'` and there's overlapping non-NA data.
            * When `errors` is not either `'ignore'` or `'raise'`
        NotImplementedError
            * If `join != 'left'`

        See Also
        --------
        dict.update : Similar method for dictionaries.
        DataFrame.merge : For column(s)-on-columns(s) operations.

        Examples
        --------
        >>> df = pd.DataFrame({'A': [1, 2, 3],
        ...                    'B': [400, 500, 600]})
        >>> new_df = pd.DataFrame({'B': [4, 5, 6],
        ...                        'C': [7, 8, 9]})
        >>> df.update(new_df)
        >>> df
           A  B
        0  1  4
        1  2  5
        2  3  6

        The DataFrame's length does not increase as a result of the update,
        only values at matching index/column labels are updated.

        >>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
        ...                    'B': ['x', 'y', 'z']})
        >>> new_df = pd.DataFrame({'B': ['d', 'e', 'f', 'g', 'h', 'i']})
        >>> df.update(new_df)
        >>> df
           A  B
        0  a  d
        1  b  e
        2  c  f

        For Series, its name attribute must be set.

        >>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
        ...                    'B': ['x', 'y', 'z']})
        >>> new_column = pd.Series(['d', 'e'], name='B', index=[0, 2])
        >>> df.update(new_column)
        >>> df
           A  B
        0  a  d
        1  b  y
        2  c  e
        >>> df = pd.DataFrame({'A': ['a', 'b', 'c'],
        ...                    'B': ['x', 'y', 'z']})
        >>> new_df = pd.DataFrame({'B': ['d', 'e']}, index=[1, 2])
        >>> df.update(new_df)
        >>> df
           A  B
        0  a  x
        1  b  d
        2  c  e

        If `other` contains NaNs, the corresponding values are not updated
        in the original dataframe.

        >>> df = pd.DataFrame({'A': [1, 2, 3],
        ...                    'B': [400, 500, 600]})
        >>> new_df = pd.DataFrame({'B': [4, np.nan, 6]})
        >>> df.update(new_df)
        >>> df
           A      B
        0  1    4.0
        1  2  500.0
        2  3    6.0
        """
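        # Sketch of the approach below (descriptive comment): for each
        # shared column, a boolean mask selects the positions to keep from
        # ``self`` -- NaNs in ``other`` when ``overwrite=True``, non-NA
        # positions of ``self`` when ``overwrite=False`` -- and the column
        # is then rewritten with ``expressions.where(mask, this, that)``.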
        import pandas.core.computation.expressions as expressions
        # TODO: Support other joins
        if join != 'left':  # pragma: no cover
            raise NotImplementedError("Only left join is supported")
        if errors not in ['ignore', 'raise']:
            raise ValueError("The parameter errors must be either "
                             "'ignore' or 'raise'")

        if not isinstance(other, DataFrame):
            other = DataFrame(other)

        other = other.reindex_like(self)

        for col in self.columns:
            this = self[col]._values
            that = other[col]._values
            if filter_func is not None:
                with np.errstate(all='ignore'):
                    mask = ~filter_func(this) | isna(that)
            else:
                if errors == 'raise':
                    mask_this = notna(that)
                    mask_that = notna(this)
                    if any(mask_this & mask_that):
                        raise ValueError("Data overlaps.")

                if overwrite:
                    mask = isna(that)
                else:
                    mask = notna(this)

            # don't overwrite columns unnecessarily
            if mask.all():
                continue

            self[col] = expressions.where(mask, this, that)

    # ----------------------------------------------------------------------
    # Data reshaping

    _shared_docs['pivot'] = """
        Return reshaped DataFrame organized by given index / column values.

        Reshape data (produce a "pivot" table) based on column values. Uses
        unique values from specified `index` / `columns` to form axes of the
        resulting DataFrame. This function does not support data
        aggregation; multiple values will result in a MultiIndex in the
        columns. See the :ref:`User Guide <reshaping>` for more on reshaping.

        Parameters
        ----------%s
        index : string or object, optional
            Column to use to make new frame's index. If None, uses
            existing index.
        columns : string or object
            Column to use to make new frame's columns.
        values : string, object or a list of the previous, optional
            Column(s) to use for populating new frame's values. If not
            specified, all remaining columns will be used and the result
            will have hierarchically indexed columns.

            .. versionchanged :: 0.23.0
               Also accept list of column names.

        Returns
        -------
        DataFrame
            Returns reshaped DataFrame.

        Raises
        ------
        ValueError:
            When there are any `index`, `columns` combinations with multiple
            values. Use `DataFrame.pivot_table` when you need to aggregate.

        See Also
        --------
        DataFrame.pivot_table : Generalization of pivot that can handle
            duplicate values for one index/column pair.
        DataFrame.unstack : Pivot based on the index values instead of a
            column.

        Notes
        -----
        For finer-tuned control, see hierarchical indexing documentation
        along with the related stack/unstack methods.

        Examples
        --------
        >>> df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two',
        ...                            'two'],
        ...                    'bar': ['A', 'B', 'C', 'A', 'B', 'C'],
        ...                    'baz': [1, 2, 3, 4, 5, 6],
        ...
'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) >>> df foo bar baz zoo 0 one A 1 x 1 one B 2 y 2 one C 3 z 3 two A 4 q 4 two B 5 w 5 two C 6 t >>> df.pivot(index='foo', columns='bar', values='baz') bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar')['baz'] bar A B C foo one 1 2 3 two 4 5 6 >>> df.pivot(index='foo', columns='bar', values=['baz', 'zoo']) baz zoo bar A B C A B C foo one 1 2 3 x y z two 4 5 6 q w t A ValueError is raised if there are any duplicates. >>> df = pd.DataFrame({"foo": ['one', 'one', 'two', 'two'], ... "bar": ['A', 'A', 'B', 'C'], ... "baz": [1, 2, 3, 4]}) >>> df foo bar baz 0 one A 1 1 one A 2 2 two B 3 3 two C 4 Notice that the first two rows are the same for our `index` and `columns` arguments. >>> df.pivot(index='foo', columns='bar', values='baz') Traceback (most recent call last): ... ValueError: Index contains duplicate entries, cannot reshape """ @Substitution('') @Appender(_shared_docs['pivot']) def pivot(self, index=None, columns=None, values=None): from pandas.core.reshape.pivot import pivot return pivot(self, index=index, columns=columns, values=values) _shared_docs['pivot_table'] = """ Create a spreadsheet-style pivot table as a DataFrame. The levels in the pivot table will be stored in MultiIndex objects (hierarchical indexes) on the index and columns of the result DataFrame. Parameters ----------%s values : column to aggregate, optional index : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table index. If an array is passed, it is being used as the same manner as column values. columns : column, Grouper, array, or list of the previous If an array is passed, it must be the same length as the data. The list can contain any of the other types (except list). Keys to group by on the pivot table column. If an array is passed, it is being used as the same manner as column values. aggfunc : function, list of functions, dict, default numpy.mean If list of functions passed, the resulting pivot table will have hierarchical columns whose top level are the function names (inferred from the function objects themselves) If dict is passed, the key is column to aggregate and value is function or list of functions fill_value : scalar, default None Value to replace missing values with margins : boolean, default False Add all row / columns (e.g. for subtotal / grand totals) dropna : boolean, default True Do not include columns whose entries are all NaN margins_name : string, default 'All' Name of the row / column that will contain the totals when margins is True. observed : boolean, default False This only applies if any of the groupers are Categoricals. If True: only show observed values for categorical groupers. If False: show all values for categorical groupers. .. versionchanged :: 0.25.0 Returns ------- DataFrame See Also -------- DataFrame.pivot : Pivot without aggregation that can handle non-numeric data. Examples -------- >>> df = pd.DataFrame({"A": ["foo", "foo", "foo", "foo", "foo", ... "bar", "bar", "bar", "bar"], ... "B": ["one", "one", "one", "two", "two", ... "one", "one", "two", "two"], ... "C": ["small", "large", "large", "small", ... "small", "large", "small", "small", ... "large"], ... "D": [1, 2, 2, 3, 3, 4, 5, 6, 7], ... 
"E": [2, 4, 5, 5, 6, 6, 8, 9, 9]}) >>> df A B C D E 0 foo one small 1 2 1 foo one large 2 4 2 foo one large 2 5 3 foo two small 3 5 4 foo two small 3 6 5 bar one large 4 6 6 bar one small 5 8 7 bar two small 6 9 8 bar two large 7 9 This first example aggregates values by taking the sum. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum) >>> table C large small A B bar one 4.0 5.0 two 7.0 6.0 foo one 4.0 1.0 two NaN 6.0 We can also fill missing values using the `fill_value` parameter. >>> table = pd.pivot_table(df, values='D', index=['A', 'B'], ... columns=['C'], aggfunc=np.sum, fill_value=0) >>> table C large small A B bar one 4 5 two 7 6 foo one 4 1 two 0 6 The next example aggregates by taking the mean across multiple columns. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': np.mean}) >>> table D E A C bar large 5.500000 7.500000 small 5.500000 8.500000 foo large 2.000000 4.500000 small 2.333333 4.333333 We can also calculate multiple types of aggregations for any given value column. >>> table = pd.pivot_table(df, values=['D', 'E'], index=['A', 'C'], ... aggfunc={'D': np.mean, ... 'E': [min, max, np.mean]}) >>> table D E mean max mean min A C bar large 5.500000 9.0 7.500000 6.0 small 5.500000 9.0 8.500000 8.0 foo large 2.000000 5.0 4.500000 4.0 small 2.333333 6.0 4.333333 2.0 """ @Substitution('') @Appender(_shared_docs['pivot_table']) def pivot_table(self, values=None, index=None, columns=None, aggfunc='mean', fill_value=None, margins=False, dropna=True, margins_name='All', observed=False): from pandas.core.reshape.pivot import pivot_table return pivot_table(self, values=values, index=index, columns=columns, aggfunc=aggfunc, fill_value=fill_value, margins=margins, dropna=dropna, margins_name=margins_name, observed=observed) def stack(self, level=-1, dropna=True): """ Stack the prescribed level(s) from columns to index. Return a reshaped DataFrame or Series having a multi-level index with one or more new inner-most levels compared to the current DataFrame. The new inner-most levels are created by pivoting the columns of the current dataframe: - if the columns have a single level, the output is a Series; - if the columns have multiple levels, the new index level(s) is (are) taken from the prescribed level(s) and the output is a DataFrame. The new index levels are sorted. Parameters ---------- level : int, str, list, default -1 Level(s) to stack from the column axis onto the index axis, defined as one index or label, or a list of indices or labels. dropna : bool, default True Whether to drop rows in the resulting Frame/Series with missing values. Stacking a column level onto the index axis can create combinations of index and column values that are missing from the original dataframe. See Examples section. Returns ------- DataFrame or Series Stacked dataframe or series. See Also -------- DataFrame.unstack : Unstack prescribed level(s) from index axis onto column axis. DataFrame.pivot : Reshape dataframe from long format to wide format. DataFrame.pivot_table : Create a spreadsheet-style pivot table as a DataFrame. Notes ----- The function is named by analogy with a collection of books being reorganized from being side by side on a horizontal position (the columns of the dataframe) to being stacked vertically on top of each other (in the index of the dataframe). Examples -------- **Single level columns** >>> df_single_level_cols = pd.DataFrame([[0, 1], [2, 3]], ... index=['cat', 'dog'], ... 
columns=['weight', 'height']) Stacking a dataframe with a single level column axis returns a Series: >>> df_single_level_cols weight height cat 0 1 dog 2 3 >>> df_single_level_cols.stack() cat weight 0 height 1 dog weight 2 height 3 dtype: int64 **Multi level columns: simple case** >>> multicol1 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('weight', 'pounds')]) >>> df_multi_level_cols1 = pd.DataFrame([[1, 2], [2, 4]], ... index=['cat', 'dog'], ... columns=multicol1) Stacking a dataframe with a multi-level column axis: >>> df_multi_level_cols1 weight kg pounds cat 1 2 dog 2 4 >>> df_multi_level_cols1.stack() weight cat kg 1 pounds 2 dog kg 2 pounds 4 **Missing values** >>> multicol2 = pd.MultiIndex.from_tuples([('weight', 'kg'), ... ('height', 'm')]) >>> df_multi_level_cols2 = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]], ... index=['cat', 'dog'], ... columns=multicol2) It is common to have missing values when stacking a dataframe with multi-level columns, as the stacked dataframe typically has more values than the original dataframe. Missing values are filled with NaNs: >>> df_multi_level_cols2 weight height kg m cat 1.0 2.0 dog 3.0 4.0 >>> df_multi_level_cols2.stack() height weight cat kg NaN 1.0 m 2.0 NaN dog kg NaN 3.0 m 4.0 NaN **Prescribing the level(s) to be stacked** The first parameter controls which level or levels are stacked: >>> df_multi_level_cols2.stack(0) kg m cat height NaN 2.0 weight 1.0 NaN dog height NaN 4.0 weight 3.0 NaN >>> df_multi_level_cols2.stack([0, 1]) cat height m 2.0 weight kg 1.0 dog height m 4.0 weight kg 3.0 dtype: float64 **Dropping missing values** >>> df_multi_level_cols3 = pd.DataFrame([[None, 1.0], [2.0, 3.0]], ... index=['cat', 'dog'], ... columns=multicol2) Note that rows where all values are missing are dropped by default but this behaviour can be controlled via the dropna keyword parameter: >>> df_multi_level_cols3 weight height kg m cat NaN 1.0 dog 2.0 3.0 >>> df_multi_level_cols3.stack(dropna=False) height weight cat kg NaN NaN m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN >>> df_multi_level_cols3.stack(dropna=True) height weight cat m 1.0 NaN dog kg NaN 2.0 m 3.0 NaN """ from pandas.core.reshape.reshape import stack, stack_multiple if isinstance(level, (tuple, list)): return stack_multiple(self, level, dropna=dropna) else: return stack(self, level, dropna=dropna) def unstack(self, level=-1, fill_value=None): """ Pivot a level of the (necessarily hierarchical) index labels, returning a DataFrame having a new level of column labels whose inner-most level consists of the pivoted index labels. If the index is not a MultiIndex, the output will be a Series (the analogue of stack when the columns are not a MultiIndex). The level involved will automatically get sorted. Parameters ---------- level : int, string, or list of these, default -1 (last level) Level(s) of index to unstack, can pass level name fill_value : replace NaN with this value if the unstack produces missing values .. versionadded:: 0.18.0 Returns ------- Series or DataFrame See Also -------- DataFrame.pivot : Pivot a table based on column values. DataFrame.stack : Pivot a level of the column labels (inverse operation from `unstack`). Examples -------- >>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), ... 
('two', 'a'), ('two', 'b')]) >>> s = pd.Series(np.arange(1.0, 5.0), index=index) >>> s one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 >>> s.unstack(level=-1) a b one 1.0 2.0 two 3.0 4.0 >>> s.unstack(level=0) one two a 1.0 3.0 b 2.0 4.0 >>> df = s.unstack(level=0) >>> df.unstack() one a 1.0 b 2.0 two a 3.0 b 4.0 dtype: float64 """ from pandas.core.reshape.reshape import unstack return unstack(self, level, fill_value) _shared_docs['melt'] = (""" Unpivot a DataFrame from wide format to long format, optionally leaving identifier variables set. This function is useful to massage a DataFrame into a format where one or more columns are identifier variables (`id_vars`), while all other columns, considered measured variables (`value_vars`), are "unpivoted" to the row axis, leaving just two non-identifier columns, 'variable' and 'value'. %(versionadded)s Parameters ---------- frame : DataFrame id_vars : tuple, list, or ndarray, optional Column(s) to use as identifier variables. value_vars : tuple, list, or ndarray, optional Column(s) to unpivot. If not specified, uses all columns that are not set as `id_vars`. var_name : scalar Name to use for the 'variable' column. If None it uses ``frame.columns.name`` or 'variable'. value_name : scalar, default 'value' Name to use for the 'value' column. col_level : int or string, optional If columns are a MultiIndex then use this level to melt. Returns ------- DataFrame Unpivoted DataFrame. See Also -------- %(other)s pivot_table DataFrame.pivot Examples -------- >>> df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'}, ... 'B': {0: 1, 1: 3, 2: 5}, ... 'C': {0: 2, 1: 4, 2: 6}}) >>> df A B C 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)sid_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=['A'], value_vars=['B', 'C']) A variable value 0 a B 1 1 b B 3 2 c B 5 3 a C 2 4 b C 4 5 c C 6 The names of 'variable' and 'value' columns can be customized: >>> %(caller)sid_vars=['A'], value_vars=['B'], ... var_name='myVarname', value_name='myValname') A myVarname myValname 0 a B 1 1 b B 3 2 c B 5 If you have multi-index columns: >>> df.columns = [list('ABC'), list('DEF')] >>> df A B C D E F 0 a 1 2 1 b 3 4 2 c 5 6 >>> %(caller)scol_level=0, id_vars=['A'], value_vars=['B']) A variable value 0 a B 1 1 b B 3 2 c B 5 >>> %(caller)sid_vars=[('A', 'D')], value_vars=[('B', 'E')]) (A, D) variable_0 variable_1 value 0 a B E 1 1 b B E 3 2 c B E 5 """) @Appender(_shared_docs['melt'] % dict(caller='df.melt(', versionadded='.. versionadded:: 0.20.0\n', other='melt')) def melt(self, id_vars=None, value_vars=None, var_name=None, value_name='value', col_level=None): from pandas.core.reshape.melt import melt return melt(self, id_vars=id_vars, value_vars=value_vars, var_name=var_name, value_name=value_name, col_level=col_level) # ---------------------------------------------------------------------- # Time series-related def diff(self, periods=1, axis=0): """ First discrete difference of element. Calculates the difference of a DataFrame element compared with another element in the DataFrame (default is the element in the same column of the previous row). Parameters ---------- periods : int, default 1 Periods to shift for calculating difference, accepts negative values. axis : {0 or 'index', 1 or 'columns'}, default 0 Take difference over rows (0) or columns (1). .. versionadded:: 0.16.1. Returns ------- DataFrame See Also -------- Series.diff: First discrete difference for a Series. DataFrame.pct_change: Percent change over given number of periods. 
DataFrame.shift: Shift index by desired number of periods with an optional time freq. Examples -------- Difference with previous row >>> df = pd.DataFrame({'a': [1, 2, 3, 4, 5, 6], ... 'b': [1, 1, 2, 3, 5, 8], ... 'c': [1, 4, 9, 16, 25, 36]}) >>> df a b c 0 1 1 1 1 2 1 4 2 3 2 9 3 4 3 16 4 5 5 25 5 6 8 36 >>> df.diff() a b c 0 NaN NaN NaN 1 1.0 0.0 3.0 2 1.0 1.0 5.0 3 1.0 1.0 7.0 4 1.0 2.0 9.0 5 1.0 3.0 11.0 Difference with previous column >>> df.diff(axis=1) a b c 0 NaN 0.0 0.0 1 NaN -1.0 3.0 2 NaN -1.0 7.0 3 NaN -1.0 13.0 4 NaN 0.0 20.0 5 NaN 2.0 28.0 Difference with 3rd previous row >>> df.diff(periods=3) a b c 0 NaN NaN NaN 1 NaN NaN NaN 2 NaN NaN NaN 3 3.0 2.0 15.0 4 3.0 4.0 21.0 5 3.0 6.0 27.0 Difference with following row >>> df.diff(periods=-1) a b c 0 -1.0 0.0 -3.0 1 -1.0 -1.0 -5.0 2 -1.0 -1.0 -7.0 3 -1.0 -2.0 -9.0 4 -1.0 -3.0 -11.0 5 NaN NaN NaN """ bm_axis = self._get_block_manager_axis(axis) new_data = self._data.diff(n=periods, axis=bm_axis) return self._constructor(new_data) # ---------------------------------------------------------------------- # Function application def _gotitem(self, key: Union[str, List[str]], ndim: int, subset: Optional[Union[Series, ABCDataFrame]] = None, ) -> Union[Series, ABCDataFrame]: """ Sub-classes to define. Return a sliced object. Parameters ---------- key : string / list of selections ndim : 1,2 requested ndim of result subset : object, default None subset to act on """ if subset is None: subset = self elif subset.ndim == 1: # is Series return subset # TODO: _shallow_copy(subset)? return subset[key] _agg_summary_and_see_also_doc = dedent(""" The aggregation operations are always performed over an axis, either the index (default) or the column axis. This behavior is different from `numpy` aggregation functions (`mean`, `median`, `prod`, `sum`, `std`, `var`), where the default is to compute the aggregation of the flattened array, e.g., ``numpy.mean(arr_2d)`` as opposed to ``numpy.mean(arr_2d, axis=0)``. `agg` is an alias for `aggregate`. Use the alias. See Also -------- DataFrame.apply : Perform any type of operations. DataFrame.transform : Perform transformation type operations. core.groupby.GroupBy : Perform operations over groups. core.resample.Resampler : Perform operations over resampled bins. core.window.Rolling : Perform operations over rolling window. core.window.Expanding : Perform operations over expanding window. core.window.EWM : Perform operation over exponential weighted window. """) _agg_examples_doc = dedent(""" Examples -------- >>> df = pd.DataFrame([[1, 2, 3], ... [4, 5, 6], ... [7, 8, 9], ... [np.nan, np.nan, np.nan]], ... columns=['A', 'B', 'C']) Aggregate these functions over the rows. >>> df.agg(['sum', 'min']) A B C sum 12.0 15.0 18.0 min 1.0 2.0 3.0 Different aggregations per column. >>> df.agg({'A' : ['sum', 'min'], 'B' : ['min', 'max']}) A B max NaN 8.0 min 1.0 2.0 sum 12.0 NaN Aggregate over the columns. >>> df.agg("mean", axis="columns") 0 2.0 1 5.0 2 8.0 3 NaN dtype: float64 """) @Substitution(see_also=_agg_summary_and_see_also_doc, examples=_agg_examples_doc, versionadded='\n.. 
versionadded:: 0.20.0\n', **_shared_doc_kwargs) @Appender(_shared_docs['aggregate']) def aggregate(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) result = None try: result, how = self._aggregate(func, axis=axis, *args, **kwargs) except TypeError: pass if result is None: return self.apply(func, axis=axis, args=args, **kwargs) return result def _aggregate(self, arg, axis=0, *args, **kwargs): if axis == 1: # NDFrame.aggregate returns a tuple, and we need to transpose # only result result, how = self.T._aggregate(arg, *args, **kwargs) result = result.T if result is not None else result return result, how return super()._aggregate(arg, *args, **kwargs) agg = aggregate @Appender(_shared_docs['transform'] % _shared_doc_kwargs) def transform(self, func, axis=0, *args, **kwargs): axis = self._get_axis_number(axis) if axis == 1: return self.T.transform(func, *args, **kwargs).T return super().transform(func, *args, **kwargs) def apply(self, func, axis=0, broadcast=None, raw=False, reduce=None, result_type=None, args=(), **kwds): """ Apply a function along an axis of the DataFrame. Objects passed to the function are Series objects whose index is either the DataFrame's index (``axis=0``) or the DataFrame's columns (``axis=1``). By default (``result_type=None``), the final return type is inferred from the return type of the applied function. Otherwise, it depends on the `result_type` argument. Parameters ---------- func : function Function to apply to each column or row. axis : {0 or 'index', 1 or 'columns'}, default 0 Axis along which the function is applied: * 0 or 'index': apply function to each column. * 1 or 'columns': apply function to each row. broadcast : bool, optional Only relevant for aggregation functions: * ``False`` or ``None`` : returns a Series whose length is the length of the index or the number of columns (based on the `axis` parameter) * ``True`` : results will be broadcast to the original shape of the frame, the original index and columns will be retained. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by result_type='broadcast'. raw : bool, default False * ``False`` : passes each row or column as a Series to the function. * ``True`` : the passed function will receive ndarray objects instead. If you are just applying a NumPy reduction function this will achieve much better performance. reduce : bool or None, default None Try to apply reduction procedures. If the DataFrame is empty, `apply` will use `reduce` to determine whether the result should be a Series or a DataFrame. If ``reduce=None`` (the default), `apply`'s return value will be guessed by calling `func` on an empty Series (note: while guessing, exceptions raised by `func` will be ignored). If ``reduce=True`` a Series will always be returned, and if ``reduce=False`` a DataFrame will always be returned. .. deprecated:: 0.23.0 This argument will be removed in a future version, replaced by ``result_type='reduce'``. result_type : {'expand', 'reduce', 'broadcast', None}, default None These only act when ``axis=1`` (columns): * 'expand' : list-like results will be turned into columns. * 'reduce' : returns a Series if possible rather than expanding list-like results. This is the opposite of 'expand'. * 'broadcast' : results will be broadcast to the original shape of the DataFrame, the original index and columns will be retained. The default behaviour (None) depends on the return value of the applied function: list-like results will be returned as a Series of those. 
        However, if the apply function returns a Series, these are
        expanded to columns.

            .. versionadded:: 0.23.0

        args : tuple
            Positional arguments to pass to `func` in addition to the
            array/series.
        **kwds
            Additional keyword arguments to pass as keyword arguments to
            `func`.

        Returns
        -------
        Series or DataFrame
            Result of applying ``func`` along the given axis of the
            DataFrame.

        See Also
        --------
        DataFrame.applymap: For elementwise operations.
        DataFrame.aggregate: Only perform aggregating type operations.
        DataFrame.transform: Only perform transforming type operations.

        Notes
        -----
        In the current implementation apply calls `func` twice on the
        first column/row to decide whether it can take a fast or slow
        code path. This can lead to unexpected behavior if `func` has
        side-effects, as they will take effect twice for the first
        column/row.

        Examples
        --------
        >>> df = pd.DataFrame([[4, 9]] * 3, columns=['A', 'B'])
        >>> df
           A  B
        0  4  9
        1  4  9
        2  4  9

        Using a numpy universal function (in this case the same as
        ``np.sqrt(df)``):

        >>> df.apply(np.sqrt)
             A    B
        0  2.0  3.0
        1  2.0  3.0
        2  2.0  3.0

        Using a reducing function on either axis

        >>> df.apply(np.sum, axis=0)
        A    12
        B    27
        dtype: int64

        >>> df.apply(np.sum, axis=1)
        0    13
        1    13
        2    13
        dtype: int64

        Returning a list-like will result in a Series

        >>> df.apply(lambda x: [1, 2], axis=1)
        0    [1, 2]
        1    [1, 2]
        2    [1, 2]
        dtype: object

        Passing result_type='expand' will expand list-like results
        to columns of a DataFrame

        >>> df.apply(lambda x: [1, 2], axis=1, result_type='expand')
           0  1
        0  1  2
        1  1  2
        2  1  2

        Returning a Series inside the function is similar to passing
        ``result_type='expand'``. The resulting column names
        will be the Series index.

        >>> df.apply(lambda x: pd.Series([1, 2], index=['foo', 'bar']), axis=1)
           foo  bar
        0    1    2
        1    1    2
        2    1    2

        Passing ``result_type='broadcast'`` will ensure the same shape
        result, whether list-like or scalar is returned by the function,
        and broadcast it along the axis. The resulting column names will
        be the originals.

        >>> df.apply(lambda x: [1, 2], axis=1, result_type='broadcast')
           A  B
        0  1  2
        1  1  2
        2  1  2
        """
        from pandas.core.apply import frame_apply
        op = frame_apply(self,
                         func=func,
                         axis=axis,
                         broadcast=broadcast,
                         raw=raw,
                         reduce=reduce,
                         result_type=result_type,
                         args=args,
                         kwds=kwds)
        return op.get_result()

    def applymap(self, func):
        """
        Apply a function to a DataFrame elementwise.

        This method applies a function that accepts and returns a scalar
        to every element of a DataFrame.

        Parameters
        ----------
        func : callable
            Python function, returns a single value from a single value.

        Returns
        -------
        DataFrame
            Transformed DataFrame.

        See Also
        --------
        DataFrame.apply : Apply a function along input axis of DataFrame.

        Notes
        -----
        In the current implementation applymap calls `func` twice on the
        first column/row to decide whether it can take a fast or slow
        code path. This can lead to unexpected behavior if `func` has
        side-effects, as they will take effect twice for the first
        column/row.

        Examples
        --------
        >>> df = pd.DataFrame([[1, 2.12], [3.356, 4.567]])
        >>> df
               0      1
        0  1.000  2.120
        1  3.356  4.567

        >>> df.applymap(lambda x: len(str(x)))
           0  1
        0  3  4
        1  5  5

        Note that a vectorized version of `func` often exists, which will
        be much faster. You could square each number elementwise.

        >>> df.applymap(lambda x: x**2)
                   0          1
        0   1.000000   4.494400
        1  11.262736  20.857489

        But it's better to avoid applymap in that case.
>>> df ** 2 0 1 0 1.000000 4.494400 1 11.262736 20.857489 """ # if we have a dtype == 'M8[ns]', provide boxed values def infer(x): if x.empty: return lib.map_infer(x, func) return lib.map_infer(x.astype(object).values, func) return self.apply(infer) # ---------------------------------------------------------------------- # Merging / joining methods def append(self, other, ignore_index=False, verify_integrity=False, sort=None): """ Append rows of `other` to the end of caller, returning a new object. Columns in `other` that are not in the caller are added as new columns. Parameters ---------- other : DataFrame or Series/dict-like object, or list of these The data to append. ignore_index : boolean, default False If True, do not use the index labels. verify_integrity : boolean, default False If True, raise ValueError on creating index with duplicates. sort : boolean, default None Sort columns if the columns of `self` and `other` are not aligned. The default sorting is deprecated and will change to not-sorting in a future version of pandas. Explicitly pass ``sort=True`` to silence the warning and sort. Explicitly pass ``sort=False`` to silence the warning and not sort. .. versionadded:: 0.23.0 Returns ------- DataFrame See Also -------- concat : General function to concatenate DataFrame or Series objects. Notes ----- If a list of dict/series is passed and the keys are all contained in the DataFrame's index, the order of the columns in the resulting DataFrame will be unchanged. Iteratively appending rows to a DataFrame can be more computationally intensive than a single concatenate. A better solution is to append those rows to a list and then concatenate the list with the original DataFrame all at once. Examples -------- >>> df = pd.DataFrame([[1, 2], [3, 4]], columns=list('AB')) >>> df A B 0 1 2 1 3 4 >>> df2 = pd.DataFrame([[5, 6], [7, 8]], columns=list('AB')) >>> df.append(df2) A B 0 1 2 1 3 4 0 5 6 1 7 8 With `ignore_index` set to True: >>> df.append(df2, ignore_index=True) A B 0 1 2 1 3 4 2 5 6 3 7 8 The following, while not recommended methods for generating DataFrames, show two ways to generate a DataFrame from multiple data sources. Less efficient: >>> df = pd.DataFrame(columns=['A']) >>> for i in range(5): ... df = df.append({'A': i}, ignore_index=True) >>> df A 0 0 1 1 2 2 3 3 4 4 More efficient: >>> pd.concat([pd.DataFrame([i], columns=['A']) for i in range(5)], ... 
        ...           ignore_index=True)
           A
        0  0
        1  1
        2  2
        3  3
        4  4
        """
        if isinstance(other, (Series, dict)):
            if isinstance(other, dict):
                other = Series(other)
            if other.name is None and not ignore_index:
                raise TypeError('Can only append a Series if '
                                'ignore_index=True or if the Series has '
                                'a name')

            if other.name is None:
                index = None
            else:
                # other must have the same index name as self, otherwise
                # index name will be reset
                index = Index([other.name], name=self.index.name)

            idx_diff = other.index.difference(self.columns)
            try:
                combined_columns = self.columns.append(idx_diff)
            except TypeError:
                combined_columns = self.columns.astype(object).append(idx_diff)
            other = other.reindex(combined_columns, copy=False)
            other = DataFrame(other.values.reshape((1, len(other))),
                              index=index,
                              columns=combined_columns)
            other = other._convert(datetime=True, timedelta=True)
            if not self.columns.equals(combined_columns):
                self = self.reindex(columns=combined_columns)
        elif isinstance(other, list) and not isinstance(other[0], DataFrame):
            other = DataFrame(other)
            if (self.columns.get_indexer(other.columns) >= 0).all():
                other = other.reindex(columns=self.columns)

        from pandas.core.reshape.concat import concat
        if isinstance(other, (list, tuple)):
            to_concat = [self] + other
        else:
            to_concat = [self, other]
        return concat(to_concat, ignore_index=ignore_index,
                      verify_integrity=verify_integrity,
                      sort=sort)

    def join(self, other, on=None, how='left', lsuffix='', rsuffix='',
             sort=False):
        """
        Join columns of another DataFrame.

        Join columns with `other` DataFrame either on index or on a key
        column. Efficiently join multiple DataFrame objects by index at once
        by passing a list.

        Parameters
        ----------
        other : DataFrame, Series, or list of DataFrame
            Index should be similar to one of the columns in this one. If a
            Series is passed, its name attribute must be set, and that will
            be used as the column name in the resulting joined DataFrame.
        on : str, list of str, or array-like, optional
            Column or index level name(s) in the caller to join on the index
            in `other`, otherwise joins index-on-index. If multiple
            values given, the `other` DataFrame must have a MultiIndex. Can
            pass an array as the join key if it is not already contained in
            the calling DataFrame. Like an Excel VLOOKUP operation.
        how : {'left', 'right', 'outer', 'inner'}, default 'left'
            How to handle the operation of the two objects.

            * left: use calling frame's index (or column if on is specified)
            * right: use `other`'s index.
            * outer: form union of calling frame's index (or column if on is
              specified) with `other`'s index, and sort it lexicographically.
            * inner: form intersection of calling frame's index (or column if
              on is specified) with `other`'s index, preserving the order
              of the calling frame's index.
        lsuffix : str, default ''
            Suffix to use from left frame's overlapping columns.
        rsuffix : str, default ''
            Suffix to use from right frame's overlapping columns.
        sort : bool, default False
            Order result DataFrame lexicographically by the join key. If
            False, the order of the join key depends on the join type
            (how keyword).

        Returns
        -------
        DataFrame
            A dataframe containing columns from both the caller and `other`.

        See Also
        --------
        DataFrame.merge : For column(s)-on-columns(s) operations.

        Notes
        -----
        Parameters `on`, `lsuffix`, and `rsuffix` are not supported when
        passing a list of `DataFrame` objects.

        Support for specifying index levels as the `on` parameter was added
        in version 0.23.0.

        Examples
        --------
        >>> df = pd.DataFrame({'key': ['K0', 'K1', 'K2', 'K3', 'K4', 'K5'],
        ...
'A': ['A0', 'A1', 'A2', 'A3', 'A4', 'A5']}) >>> df key A 0 K0 A0 1 K1 A1 2 K2 A2 3 K3 A3 4 K4 A4 5 K5 A5 >>> other = pd.DataFrame({'key': ['K0', 'K1', 'K2'], ... 'B': ['B0', 'B1', 'B2']}) >>> other key B 0 K0 B0 1 K1 B1 2 K2 B2 Join DataFrames using their indexes. >>> df.join(other, lsuffix='_caller', rsuffix='_other') key_caller A key_other B 0 K0 A0 K0 B0 1 K1 A1 K1 B1 2 K2 A2 K2 B2 3 K3 A3 NaN NaN 4 K4 A4 NaN NaN 5 K5 A5 NaN NaN If we want to join using the key columns, we need to set key to be the index in both `df` and `other`. The joined DataFrame will have key as its index. >>> df.set_index('key').join(other.set_index('key')) A B key K0 A0 B0 K1 A1 B1 K2 A2 B2 K3 A3 NaN K4 A4 NaN K5 A5 NaN Another option to join using the key columns is to use the `on` parameter. DataFrame.join always uses `other`'s index but we can use any column in `df`. This method preserves the original DataFrame's index in the result. >>> df.join(other.set_index('key'), on='key') key A B 0 K0 A0 B0 1 K1 A1 B1 2 K2 A2 B2 3 K3 A3 NaN 4 K4 A4 NaN 5 K5 A5 NaN """ # For SparseDataFrame's benefit return self._join_compat(other, on=on, how=how, lsuffix=lsuffix, rsuffix=rsuffix, sort=sort) def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='', sort=False): from pandas.core.reshape.merge import merge from pandas.core.reshape.concat import concat if isinstance(other, Series): if other.name is None: raise ValueError('Other Series must have a name') other = DataFrame({other.name: other}) if isinstance(other, DataFrame): return merge(self, other, left_on=on, how=how, left_index=on is None, right_index=True, suffixes=(lsuffix, rsuffix), sort=sort) else: if on is not None: raise ValueError('Joining multiple DataFrames only supported' ' for joining on index') frames = [self] + list(other) can_concat = all(df.index.is_unique for df in frames) # join indexes only using concat if can_concat: if how == 'left': how = 'outer' join_axes = [self.index] else: join_axes = None return concat(frames, axis=1, join=how, join_axes=join_axes, verify_integrity=True) joined = frames[0] for frame in frames[1:]: joined = merge(joined, frame, how=how, left_index=True, right_index=True) return joined @Substitution('') @Appender(_merge_doc, indents=2) def merge(self, right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True, indicator=False, validate=None): from pandas.core.reshape.merge import merge return merge(self, right, how=how, on=on, left_on=left_on, right_on=right_on, left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes, copy=copy, indicator=indicator, validate=validate) def round(self, decimals=0, *args, **kwargs): """ Round a DataFrame to a variable number of decimal places. Parameters ---------- decimals : int, dict, Series Number of decimal places to round each column to. If an int is given, round each column to the same number of places. Otherwise dict and Series round to variable numbers of places. Column names should be in the keys if `decimals` is a dict-like, or in the index if `decimals` is a Series. Any columns not included in `decimals` will be left as is. Elements of `decimals` which are not columns of the input will be ignored. *args Additional keywords have no effect but might be accepted for compatibility with numpy. **kwargs Additional keywords have no effect but might be accepted for compatibility with numpy. 
Returns ------- DataFrame A DataFrame with the affected columns rounded to the specified number of decimal places. See Also -------- numpy.around : Round a numpy array to the given number of decimals. Series.round : Round a Series to the given number of decimals. Examples -------- >>> df = pd.DataFrame([(.21, .32), (.01, .67), (.66, .03), (.21, .18)], ... columns=['dogs', 'cats']) >>> df dogs cats 0 0.21 0.32 1 0.01 0.67 2 0.66 0.03 3 0.21 0.18 By providing an integer each column is rounded to the same number of decimal places >>> df.round(1) dogs cats 0 0.2 0.3 1 0.0 0.7 2 0.7 0.0 3 0.2 0.2 With a dict, the number of places for specific columns can be specified with the column names as key and the number of decimal places as value >>> df.round({'dogs': 1, 'cats': 0}) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 Using a Series, the number of places for specific columns can be specified with the column names as index and the number of decimal places as value >>> decimals = pd.Series([0, 1], index=['cats', 'dogs']) >>> df.round(decimals) dogs cats 0 0.2 0.0 1 0.0 1.0 2 0.7 0.0 3 0.2 0.0 """ from pandas.core.reshape.concat import concat def _dict_round(df, decimals): for col, vals in df.iteritems(): try: yield _series_round(vals, decimals[col]) except KeyError: yield vals def _series_round(s, decimals): if is_integer_dtype(s) or is_float_dtype(s): return s.round(decimals) return s nv.validate_round(args, kwargs) if isinstance(decimals, (dict, Series)): if isinstance(decimals, Series): if not decimals.index.is_unique: raise ValueError("Index of decimals must be unique") new_cols = [col for col in _dict_round(self, decimals)] elif is_integer(decimals): # Dispatch to Series.round new_cols = [_series_round(v, decimals) for _, v in self.iteritems()] else: raise TypeError("decimals must be an integer, a dict-like or a " "Series") if len(new_cols) > 0: return self._constructor(concat(new_cols, axis=1), index=self.index, columns=self.columns) else: return self # ---------------------------------------------------------------------- # Statistical methods, etc. def corr(self, method='pearson', min_periods=1): """ Compute pairwise correlation of columns, excluding NA/null values. Parameters ---------- method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float. Note that the returned matrix from corr will have 1 along the diagonals and will be symmetric regardless of the callable's behavior .. versionadded:: 0.24.0 min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Currently only available for Pearson and Spearman correlation. Returns ------- DataFrame Correlation matrix. See Also -------- DataFrame.corrwith Series.corr Examples -------- >>> def histogram_intersection(a, b): ... v = np.minimum(a, b).sum().round(decimals=1) ... return v >>> df = pd.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], ... 
columns=['dogs', 'cats']) >>> df.corr(method=histogram_intersection) dogs cats dogs 1.0 0.3 cats 0.3 1.0 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if method == 'pearson': correl = libalgos.nancorr(ensure_float64(mat), minp=min_periods) elif method == 'spearman': correl = libalgos.nancorr_spearman(ensure_float64(mat), minp=min_periods) elif method == 'kendall' or callable(method): if min_periods is None: min_periods = 1 mat = ensure_float64(mat).T corrf = nanops.get_corr_func(method) K = len(cols) correl = np.empty((K, K), dtype=float) mask = np.isfinite(mat) for i, ac in enumerate(mat): for j, bc in enumerate(mat): if i > j: continue valid = mask[i] & mask[j] if valid.sum() < min_periods: c = np.nan elif i == j: c = 1. elif not valid.all(): c = corrf(ac[valid], bc[valid]) else: c = corrf(ac, bc) correl[i, j] = c correl[j, i] = c else: raise ValueError("method must be either 'pearson', " "'spearman', 'kendall', or a callable, " "'{method}' was supplied".format(method=method)) return self._constructor(correl, index=idx, columns=cols) def cov(self, min_periods=None): """ Compute pairwise covariance of columns, excluding NA/null values. Compute the pairwise covariance among the series of a DataFrame. The returned data frame is the `covariance matrix <https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns of the DataFrame. Both NA and null values are automatically excluded from the calculation. (See the note below about bias from missing values.) A threshold can be set for the minimum number of observations for each value created. Comparisons with observations below this threshold will be returned as ``NaN``. This method is generally used for the analysis of time series data to understand the relationship between different measures across time. Parameters ---------- min_periods : int, optional Minimum number of observations required per pair of columns to have a valid result. Returns ------- DataFrame The covariance matrix of the series of the DataFrame. See Also -------- Series.cov : Compute covariance with another Series. core.window.EWM.cov: Exponential weighted sample covariance. core.window.Expanding.cov : Expanding sample covariance. core.window.Rolling.cov : Rolling sample covariance. Notes ----- Returns the covariance matrix of the DataFrame's time series. The covariance is normalized by N-1. For DataFrames that have Series that are missing data (assuming that data is `missing at random <https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__) the returned covariance matrix will be an unbiased estimate of the variance and covariance between the member Series. However, for many applications this estimate may not be acceptable because the estimate covariance matrix is not guaranteed to be positive semi-definite. This could lead to estimate correlations having absolute values which are greater than one, and/or a non-invertible covariance matrix. See `Estimation of covariance matrices <http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_ matrices>`__ for more details. Examples -------- >>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)], ... columns=['dogs', 'cats']) >>> df.cov() dogs cats dogs 0.666667 -1.000000 cats -1.000000 1.666667 >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(1000, 5), ... 
columns=['a', 'b', 'c', 'd', 'e']) >>> df.cov() a b c d e a 0.998438 -0.020161 0.059277 -0.008943 0.014144 b -0.020161 1.059352 -0.008543 -0.024738 0.009826 c 0.059277 -0.008543 1.010670 -0.001486 -0.000271 d -0.008943 -0.024738 -0.001486 0.921297 -0.013692 e 0.014144 0.009826 -0.000271 -0.013692 0.977795 **Minimum number of periods** This method also supports an optional ``min_periods`` keyword that specifies the required minimum number of non-NA observations for each column pair in order to have a valid result: >>> np.random.seed(42) >>> df = pd.DataFrame(np.random.randn(20, 3), ... columns=['a', 'b', 'c']) >>> df.loc[df.index[:5], 'a'] = np.nan >>> df.loc[df.index[5:10], 'b'] = np.nan >>> df.cov(min_periods=12) a b c a 0.316741 NaN -0.150812 b NaN 1.248003 0.191417 c -0.150812 0.191417 0.895202 """ numeric_df = self._get_numeric_data() cols = numeric_df.columns idx = cols.copy() mat = numeric_df.values if notna(mat).all(): if min_periods is not None and min_periods > len(mat): baseCov = np.empty((mat.shape[1], mat.shape[1])) baseCov.fill(np.nan) else: baseCov = np.cov(mat.T) baseCov = baseCov.reshape((len(cols), len(cols))) else: baseCov = libalgos.nancorr(ensure_float64(mat), cov=True, minp=min_periods) return self._constructor(baseCov, index=idx, columns=cols) def corrwith(self, other, axis=0, drop=False, method='pearson'): """ Compute pairwise correlation between rows or columns of DataFrame with rows or columns of Series or DataFrame. DataFrames are first aligned along both axes before computing the correlations. Parameters ---------- other : DataFrame, Series Object with which to compute correlations. axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' to compute column-wise, 1 or 'columns' for row-wise. drop : bool, default False Drop missing indices from result. method : {'pearson', 'kendall', 'spearman'} or callable * pearson : standard correlation coefficient * kendall : Kendall Tau correlation coefficient * spearman : Spearman rank correlation * callable: callable with input two 1d ndarrays and returning a float .. versionadded:: 0.24.0 Returns ------- Series Pairwise correlations. See Also -------- DataFrame.corr """ axis = self._get_axis_number(axis) this = self._get_numeric_data() if isinstance(other, Series): return this.apply(lambda x: other.corr(x, method=method), axis=axis) other = other._get_numeric_data() left, right = this.align(other, join='inner', copy=False) if axis == 1: left = left.T right = right.T if method == 'pearson': # mask missing values left = left + right * 0 right = right + left * 0 # demeaned data ldem = left - left.mean() rdem = right - right.mean() num = (ldem * rdem).sum() dom = (left.count() - 1) * left.std() * right.std() correl = num / dom elif method in ['kendall', 'spearman'] or callable(method): def c(x): return nanops.nancorr(x[0], x[1], method=method) correl = Series(map(c, zip(left.values.T, right.values.T)), index=left.columns) else: raise ValueError("Invalid method {method} was passed, " "valid methods are: 'pearson', 'kendall', " "'spearman', or callable". format(method=method)) if not drop: # Find non-matching labels along the given axis # and append missing correlations (GH 22375) raxis = 1 if axis == 0 else 0 result_index = (this._get_axis(raxis). 
union(other._get_axis(raxis))) idx_diff = result_index.difference(correl.index) if len(idx_diff) > 0: correl = correl.append(Series([np.nan] * len(idx_diff), index=idx_diff)) return correl # ---------------------------------------------------------------------- # ndarray-like stats methods def count(self, axis=0, level=None, numeric_only=False): """ Count non-NA cells for each column or row. The values `None`, `NaN`, `NaT`, and optionally `numpy.inf` (depending on `pandas.options.mode.use_inf_as_na`) are considered NA. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 If 0 or 'index' counts are generated for each column. If 1 or 'columns' counts are generated for each **row**. level : int or str, optional If the axis is a `MultiIndex` (hierarchical), count along a particular `level`, collapsing into a `DataFrame`. A `str` specifies the level name. numeric_only : bool, default False Include only `float`, `int` or `boolean` data. Returns ------- Series or DataFrame For each column/row the number of non-NA/null entries. If `level` is specified returns a `DataFrame`. See Also -------- Series.count: Number of non-NA elements in a Series. DataFrame.shape: Number of DataFrame rows and columns (including NA elements). DataFrame.isna: Boolean same-sized DataFrame showing places of NA elements. Examples -------- Constructing DataFrame from a dictionary: >>> df = pd.DataFrame({"Person": ... ["John", "Myla", "Lewis", "John", "Myla"], ... "Age": [24., np.nan, 21., 33, 26], ... "Single": [False, True, True, True, False]}) >>> df Person Age Single 0 John 24.0 False 1 Myla NaN True 2 Lewis 21.0 True 3 John 33.0 True 4 Myla 26.0 False Notice the uncounted NA values: >>> df.count() Person 5 Age 4 Single 5 dtype: int64 Counts for each **row**: >>> df.count(axis='columns') 0 3 1 2 2 3 3 3 4 3 dtype: int64 Counts for one level of a `MultiIndex`: >>> df.set_index(["Person", "Single"]).count(level="Person") Age Person John 2 Lewis 1 Myla 1 """ axis = self._get_axis_number(axis) if level is not None: return self._count_level(level, axis=axis, numeric_only=numeric_only) if numeric_only: frame = self._get_numeric_data() else: frame = self # GH #423 if len(frame._get_axis(axis)) == 0: result = Series(0, index=frame._get_agg_axis(axis)) else: if frame._is_mixed_type or frame._data.any_extension_types: # the or any_extension_types is really only hit for single- # column frames with an extension array result = notna(frame).sum(axis=axis) else: # GH13407 series_counts = notna(frame).sum(axis=axis) counts = series_counts.values result = Series(counts, index=frame._get_agg_axis(axis)) return result.astype('int64') def _count_level(self, level, axis=0, numeric_only=False): if numeric_only: frame = self._get_numeric_data() else: frame = self count_axis = frame._get_axis(axis) agg_axis = frame._get_agg_axis(axis) if not isinstance(count_axis, MultiIndex): raise TypeError("Can only count levels on hierarchical " "{ax}.".format(ax=self._get_axis_name(axis))) if frame._is_mixed_type: # Since we have mixed types, calling notna(frame.values) might # upcast everything to object mask = notna(frame).values else: # But use the speedup when we have homogeneous dtypes mask = notna(frame.values) if axis == 1: # We're transposing the mask rather than frame to avoid potential # upcasts to object, which induces a ~20x slowdown mask = mask.T if isinstance(level, str): level = count_axis._get_level_number(level) level_index = count_axis.levels[level] level_codes = ensure_int64(count_axis.codes[level]) counts = 
lib.count_level_2d(mask, level_codes, len(level_index), axis=0) result = DataFrame(counts, index=level_index, columns=agg_axis) if axis == 1: # Undo our earlier transpose return result.T else: return result def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None, filter_type=None, **kwds): if axis is None and filter_type == 'bool': labels = None constructor = None else: # TODO: Make other agg func handle axis=None properly axis = self._get_axis_number(axis) labels = self._get_agg_axis(axis) constructor = self._constructor def f(x): return op(x, axis=axis, skipna=skipna, **kwds) # exclude timedelta/datetime unless we are uniform types if (axis == 1 and self._is_datelike_mixed_type and (not self._is_homogeneous_type and not is_datetime64tz_dtype(self.dtypes[0]))): numeric_only = True if numeric_only is None: try: values = self.values result = f(values) if (filter_type == 'bool' and is_object_dtype(values) and axis is None): # work around https://github.com/numpy/numpy/issues/10489 # TODO: combine with hasattr(result, 'dtype') further down # hard since we don't have `values` down there. result = np.bool_(result) except Exception as e: # try by-column first if filter_type is None and axis == 0: try: # this can end up with a non-reduction # but not always. if the types are mixed # with datelike then need to make sure a series # we only end up here if we have not specified # numeric_only and yet we have tried a # column-by-column reduction, where we have mixed type. # So let's just do what we can from pandas.core.apply import frame_apply opa = frame_apply(self, func=f, result_type='expand', ignore_failures=True) result = opa.get_result() if result.ndim == self.ndim: result = result.iloc[0] return result except Exception: pass if filter_type is None or filter_type == 'numeric': data = self._get_numeric_data() elif filter_type == 'bool': data = self._get_bool_data() else: # pragma: no cover e = NotImplementedError( "Handling exception with filter_type {f} not " "implemented.".format(f=filter_type)) raise_with_traceback(e) with np.errstate(all='ignore'): result = f(data.values) labels = data._get_agg_axis(axis) else: if numeric_only: if filter_type is None or filter_type == 'numeric': data = self._get_numeric_data() elif filter_type == 'bool': # GH 25101, # GH 24434 data = self._get_bool_data() if axis == 0 else self else: # pragma: no cover msg = ("Generating numeric_only data with filter_type {f} " "not supported.".format(f=filter_type)) raise NotImplementedError(msg) values = data.values labels = data._get_agg_axis(axis) else: values = self.values result = f(values) if hasattr(result, 'dtype') and is_object_dtype(result.dtype): try: if filter_type is None or filter_type == 'numeric': result = result.astype(np.float64) elif filter_type == 'bool' and notna(result).all(): result = result.astype(np.bool_) except (ValueError, TypeError): # try to coerce to the original dtypes item by item if we can if axis == 0: result = coerce_to_dtypes(result, self.dtypes) if constructor is not None: result = Series(result, index=labels) return result def nunique(self, axis=0, dropna=True): """ Count distinct observations over requested axis. Return Series with number of distinct observations. Can ignore NaN values. .. versionadded:: 0.20.0 Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to use. 0 or 'index' for row-wise, 1 or 'columns' for column-wise. dropna : bool, default True Don't include NaN in the counts.
Returns ------- Series See Also -------- Series.nunique: Method nunique for Series. DataFrame.count: Count non-NA cells for each column or row. Examples -------- >>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [1, 1, 1]}) >>> df.nunique() A 3 B 1 dtype: int64 >>> df.nunique(axis=1) 0 1 1 2 2 2 dtype: int64 """ return self.apply(Series.nunique, axis=axis, dropna=dropna) def idxmin(self, axis=0, skipna=True): """ Return index of first occurrence of minimum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of minima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmin Notes ----- This method is the DataFrame version of ``ndarray.argmin``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmin(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def idxmax(self, axis=0, skipna=True): """ Return index of first occurrence of maximum over requested axis. NA/null values are excluded. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 0 or 'index' for row-wise, 1 or 'columns' for column-wise skipna : boolean, default True Exclude NA/null values. If an entire row/column is NA, the result will be NA. Returns ------- Series Indexes of maxima along the specified axis. Raises ------ ValueError * If the row/column is empty See Also -------- Series.idxmax Notes ----- This method is the DataFrame version of ``ndarray.argmax``. """ axis = self._get_axis_number(axis) indices = nanops.nanargmax(self.values, axis=axis, skipna=skipna) index = self._get_axis(axis) result = [index[i] if i >= 0 else np.nan for i in indices] return Series(result, index=self._get_agg_axis(axis)) def _get_agg_axis(self, axis_num): """ Let's be explicit about this. """ if axis_num == 0: return self.columns elif axis_num == 1: return self.index else: raise ValueError('Axis must be 0 or 1 (got %r)' % axis_num) def mode(self, axis=0, numeric_only=False, dropna=True): """ Get the mode(s) of each element along the selected axis. The mode of a set of values is the value that appears most often. It can be multiple values. Parameters ---------- axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to iterate over while searching for the mode: * 0 or 'index' : get mode of each column * 1 or 'columns' : get mode of each row numeric_only : bool, default False If True, only apply to numeric columns. dropna : bool, default True Don't consider counts of NaN/NaT. .. versionadded:: 0.24.0 Returns ------- DataFrame The modes of each column or row. See Also -------- Series.mode : Return the highest frequency value in a Series. Series.value_counts : Return the counts of values in a Series. Examples -------- >>> df = pd.DataFrame([('bird', 2, 2), ... ('mammal', 4, np.nan), ... ('arthropod', 8, 0), ... ('bird', 2, np.nan)], ... index=('falcon', 'horse', 'spider', 'ostrich'), ... columns=('species', 'legs', 'wings')) >>> df species legs wings falcon bird 2 2.0 horse mammal 4 NaN spider arthropod 8 0.0 ostrich bird 2 NaN By default, missing values are not considered, and the mode of wings are both 0 and 2. 
The second row of species and legs contains ``NaN``, because they have only one mode, but the DataFrame has two rows. >>> df.mode() species legs wings 0 bird 2.0 0.0 1 NaN NaN 2.0 Setting ``dropna=False`` ``NaN`` values are considered and they can be the mode (like for wings). >>> df.mode(dropna=False) species legs wings 0 bird 2 NaN Setting ``numeric_only=True``, only the mode of numeric columns is computed, and columns of other types are ignored. >>> df.mode(numeric_only=True) legs wings 0 2.0 0.0 1 NaN 2.0 To compute the mode over columns and not rows, use the axis parameter: >>> df.mode(axis='columns', numeric_only=True) 0 1 falcon 2.0 NaN horse 4.0 NaN spider 0.0 8.0 ostrich 2.0 NaN """ data = self if not numeric_only else self._get_numeric_data() def f(s): return s.mode(dropna=dropna) return data.apply(f, axis=axis) def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation='linear'): """ Return values at the given quantile over requested axis. Parameters ---------- q : float or array-like, default 0.5 (50% quantile) Value between 0 <= q <= 1, the quantile(s) to compute. axis : {0, 1, 'index', 'columns'} (default 0) Equals 0 or 'index' for row-wise, 1 or 'columns' for column-wise. numeric_only : bool, default True If False, the quantile of datetime and timedelta data will be computed as well. interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'} This optional parameter specifies the interpolation method to use, when the desired quantile lies between two data points `i` and `j`: * linear: `i + (j - i) * fraction`, where `fraction` is the fractional part of the index surrounded by `i` and `j`. * lower: `i`. * higher: `j`. * nearest: `i` or `j` whichever is nearest. * midpoint: (`i` + `j`) / 2. .. versionadded:: 0.18.0 Returns ------- Series or DataFrame If ``q`` is an array, a DataFrame will be returned where the index is ``q``, the columns are the columns of self, and the values are the quantiles. If ``q`` is a float, a Series will be returned where the index is the columns of self and the values are the quantiles. See Also -------- core.window.Rolling.quantile: Rolling quantile. numpy.percentile: Numpy function to compute the percentile. Examples -------- >>> df = pd.DataFrame(np.array([[1, 1], [2, 10], [3, 100], [4, 100]]), ... columns=['a', 'b']) >>> df.quantile(.1) a 1.3 b 3.7 Name: 0.1, dtype: float64 >>> df.quantile([.1, .5]) a b 0.1 1.3 3.7 0.5 2.5 55.0 Specifying `numeric_only=False` will also compute the quantile of datetime and timedelta data. >>> df = pd.DataFrame({'A': [1, 2], ... 'B': [pd.Timestamp('2010'), ... pd.Timestamp('2011')], ... 'C': [pd.Timedelta('1 days'), ... pd.Timedelta('2 days')]}) >>> df.quantile(0.5, numeric_only=False) A 1.5 B 2010-07-02 12:00:00 C 1 days 12:00:00 Name: 0.5, dtype: object """ self._check_percentile(q) data = self._get_numeric_data() if numeric_only else self axis = self._get_axis_number(axis) is_transposed = axis == 1 if is_transposed: data = data.T result = data._data.quantile(qs=q, axis=1, interpolation=interpolation, transposed=is_transposed) if result.ndim == 2: result = self._constructor(result) else: result = self._constructor_sliced(result, name=q) if is_transposed: result = result.T return result def to_timestamp(self, freq=None, how='start', axis=0, copy=True): """ Cast to DatetimeIndex of timestamps, at *beginning* of period. Parameters ---------- freq : str, default frequency of PeriodIndex Desired frequency. 
how : {'s', 'e', 'start', 'end'} Convention for converting period to timestamp; start of period vs. end. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- DataFrame with DatetimeIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_timestamp(freq=freq, how=how)) elif axis == 1: new_data.set_axis(0, self.columns.to_timestamp(freq=freq, how=how)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def to_period(self, freq=None, axis=0, copy=True): """ Convert DataFrame from DatetimeIndex to PeriodIndex with desired frequency (inferred from index if not passed). Parameters ---------- freq : str, default Frequency of the PeriodIndex. axis : {0 or 'index', 1 or 'columns'}, default 0 The axis to convert (the index by default). copy : bool, default True If False then underlying input data is not copied. Returns ------- TimeSeries with PeriodIndex """ new_data = self._data if copy: new_data = new_data.copy() axis = self._get_axis_number(axis) if axis == 0: new_data.set_axis(1, self.index.to_period(freq=freq)) elif axis == 1: new_data.set_axis(0, self.columns.to_period(freq=freq)) else: # pragma: no cover raise AssertionError('Axis must be 0 or 1. Got {ax!s}'.format( ax=axis)) return self._constructor(new_data) def isin(self, values): """ Whether each element in the DataFrame is contained in values. Parameters ---------- values : iterable, Series, DataFrame or dict The result will only be true at a location if all the labels match. If `values` is a Series, that's the index. If `values` is a dict, the keys must be the column names, which must match. If `values` is a DataFrame, then both the index and column labels must match. Returns ------- DataFrame DataFrame of booleans showing whether each element in the DataFrame is contained in values. See Also -------- DataFrame.eq: Equality test for DataFrame. Series.isin: Equivalent method on Series. Series.str.contains: Test if pattern or regex is contained within a string of a Series or Index. Examples -------- >>> df = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]}, ... index=['falcon', 'dog']) >>> df num_legs num_wings falcon 2 2 dog 4 0 When ``values`` is a list check whether every value in the DataFrame is present in the list (which animals have 0 or 2 legs or wings) >>> df.isin([0, 2]) num_legs num_wings falcon True True dog False True When ``values`` is a dict, we can pass values to check for each column separately: >>> df.isin({'num_wings': [0, 3]}) num_legs num_wings falcon False False dog False True When ``values`` is a Series or DataFrame the index and column must match. Note that 'falcon' does not match based on the number of legs in df2. >>> other = pd.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]}, ... 
index=['spider', 'falcon']) >>> df.isin(other) num_legs num_wings falcon True True dog False False """ if isinstance(values, dict): from pandas.core.reshape.concat import concat values = collections.defaultdict(list, values) return concat((self.iloc[:, [i]].isin(values[col]) for i, col in enumerate(self.columns)), axis=1) elif isinstance(values, Series): if not values.index.is_unique: raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self), axis='index') elif isinstance(values, DataFrame): if not (values.columns.is_unique and values.index.is_unique): raise ValueError("cannot compute isin with " "a duplicate axis.") return self.eq(values.reindex_like(self)) else: if not is_list_like(values): raise TypeError("only list-like or dict-like objects are " "allowed to be passed to DataFrame.isin(), " "you passed a " "{0!r}".format(type(values).__name__)) return DataFrame( algorithms.isin(self.values.ravel(), values).reshape(self.shape), self.index, self.columns) # ---------------------------------------------------------------------- # Add plotting methods to DataFrame plot = CachedAccessor("plot", pandas.plotting.FramePlotMethods) hist = pandas.plotting.hist_frame boxplot = pandas.plotting.boxplot_frame sparse = CachedAccessor("sparse", SparseFrameAccessor) DataFrame._setup_axes(['index', 'columns'], info_axis=1, stat_axis=0, axes_are_reversed=True, aliases={'rows': 0}, docs={ 'index': 'The index (row labels) of the DataFrame.', 'columns': 'The column labels of the DataFrame.'}) DataFrame._add_numeric_operations() DataFrame._add_series_or_dataframe_operations() ops.add_flex_arithmetic_methods(DataFrame) ops.add_special_arithmetic_methods(DataFrame) def _from_nested_dict(data): # TODO: this should be seriously cythonized new_data = OrderedDict() for index, s in data.items(): for col, v in s.items(): new_data[col] = new_data.get(col, OrderedDict()) new_data[col][index] = v return new_data def _put_str(s, space): return '{s}'.format(s=s)[:space].ljust(space)
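The statistical methods defined above (``round``, ``corr``, ``cov``, ``corrwith``, ``count``, ``isin``) are easiest to see end to end on a small frame. A minimal, self-contained sketch; the toy data below is illustrative and not taken from the source, only the method calls are the ones defined above:

import numpy as np
import pandas as pd

# Toy frame with one missing value to exercise the NA-aware code paths.
df = pd.DataFrame({"dogs": [0.21, 0.01, 0.66, 0.21],
                   "cats": [0.32, 0.67, np.nan, 0.18]})

print(df.round({"dogs": 1, "cats": 0}))  # per-column decimals via a dict
print(df.count())                        # non-NA cells per column: dogs 4, cats 3
print(df.corr(method="spearman"))        # pairwise rank correlation, NAs excluded
print(df.cov(min_periods=3))             # covariance; pairs with <3 observations -> NaN
print(df.corrwith(df["dogs"]))           # correlate each column with one Series
print(df.isin([0.21, 0.18]))             # boolean membership mask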
import pytest import pandas as pd from pandas import TimedeltaIndex class TestTimedeltaIndexRendering: @pytest.mark.parametrize('method', ['__repr__', '__str__']) def test_representation(self, method): idx1 = TimedeltaIndex([], freq='D') idx2 = TimedeltaIndex(['1 days'], freq='D') idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D') idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D') idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days']) exp1 = """TimedeltaIndex([], dtype='timedelta64[ns]', freq='D')""" exp2 = ("TimedeltaIndex(['1 days'], dtype='timedelta64[ns]', " "freq='D')") exp3 = ("TimedeltaIndex(['1 days', '2 days'], " "dtype='timedelta64[ns]', freq='D')") exp4 = ("TimedeltaIndex(['1 days', '2 days', '3 days'], " "dtype='timedelta64[ns]', freq='D')") exp5 = ("TimedeltaIndex(['1 days 00:00:01', '2 days 00:00:00', " "'3 days 00:00:00'], dtype='timedelta64[ns]', freq=None)") with pd.option_context('display.width', 300): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = getattr(idx, method)() assert result == expected def test_representation_to_series(self): idx1 = TimedeltaIndex([], freq='D') idx2 = TimedeltaIndex(['1 days'], freq='D') idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D') idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D') idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days']) exp1 = """Series([], dtype: timedelta64[ns])""" exp2 = ("0 1 days\n" "dtype: timedelta64[ns]") exp3 = ("0 1 days\n" "1 2 days\n" "dtype: timedelta64[ns]") exp4 = ("0 1 days\n" "1 2 days\n" "2 3 days\n" "dtype: timedelta64[ns]") exp5 = ("0 1 days 00:00:01\n" "1 2 days 00:00:00\n" "2 3 days 00:00:00\n" "dtype: timedelta64[ns]") with pd.option_context('display.width', 300): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = repr(pd.Series(idx)) assert result == expected def test_summary(self): # GH#9116 idx1 = TimedeltaIndex([], freq='D') idx2 = TimedeltaIndex(['1 days'], freq='D') idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D') idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D') idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days']) exp1 = ("TimedeltaIndex: 0 entries\n" "Freq: D") exp2 = ("TimedeltaIndex: 1 entries, 1 days to 1 days\n" "Freq: D") exp3 = ("TimedeltaIndex: 2 entries, 1 days to 2 days\n" "Freq: D") exp4 = ("TimedeltaIndex: 3 entries, 1 days to 3 days\n" "Freq: D") exp5 = ("TimedeltaIndex: 3 entries, 1 days 00:00:01 to 3 days " "00:00:00") for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = idx._summary() assert result == expected
repo_name: cbertinato/pandas
test_path: pandas/tests/indexes/timedeltas/test_formats.py
code_path: pandas/core/frame.py
""" manage PyTables query interface via Expressions """ import ast from functools import partial import numpy as np from pandas._libs.tslibs import Timedelta, Timestamp from pandas.compat.chainmap import DeepChainMap from pandas.core.dtypes.common import is_list_like import pandas as pd from pandas.core.base import StringMixin import pandas.core.common as com from pandas.core.computation import expr, ops from pandas.core.computation.common import _ensure_decoded from pandas.core.computation.expr import BaseExprVisitor from pandas.core.computation.ops import UndefinedVariableError, is_term from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded class Scope(expr.Scope): __slots__ = 'queryables', def __init__(self, level, global_dict=None, local_dict=None, queryables=None): super().__init__(level + 1, global_dict=global_dict, local_dict=local_dict) self.queryables = queryables or dict() class Term(ops.Term): def __new__(cls, name, env, side=None, encoding=None): klass = Constant if not isinstance(name, str) else cls supr_new = StringMixin.__new__ return supr_new(klass) def __init__(self, name, env, side=None, encoding=None): super().__init__(name, env, side=side, encoding=encoding) def _resolve_name(self): # must be a queryables if self.side == 'left': if self.name not in self.env.queryables: raise NameError('name {name!r} is not defined' .format(name=self.name)) return self.name # resolve the rhs (and allow it to be None) try: return self.env.resolve(self.name, is_local=False) except UndefinedVariableError: return self.name # read-only property overwriting read/write property @property # type: ignore def value(self): return self._value class Constant(Term): def __init__(self, value, env, side=None, encoding=None): super().__init__(value, env, side=side, encoding=encoding) def _resolve_name(self): return self._name class BinOp(ops.BinOp): _max_selectors = 31 def __init__(self, op, lhs, rhs, queryables, encoding): super().__init__(op, lhs, rhs) self.queryables = queryables self.encoding = encoding self.filter = None self.condition = None def _disallow_scalar_only_bool_ops(self): pass def prune(self, klass): def pr(left, right): """ create and return a new specialized BinOp from myself """ if left is None: return right elif right is None: return left k = klass if isinstance(left, ConditionBinOp): if (isinstance(left, ConditionBinOp) and isinstance(right, ConditionBinOp)): k = JointConditionBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right elif isinstance(left, FilterBinOp): if (isinstance(left, FilterBinOp) and isinstance(right, FilterBinOp)): k = JointFilterBinOp elif isinstance(left, k): return left elif isinstance(right, k): return right return k(self.op, left, right, queryables=self.queryables, encoding=self.encoding).evaluate() left, right = self.lhs, self.rhs if is_term(left) and is_term(right): res = pr(left.value, right.value) elif not is_term(left) and is_term(right): res = pr(left.prune(klass), right.value) elif is_term(left) and not is_term(right): res = pr(left.value, right.prune(klass)) elif not (is_term(left) or is_term(right)): res = pr(left.prune(klass), right.prune(klass)) return res def conform(self, rhs): """ inplace conform rhs """ if not is_list_like(rhs): rhs = [rhs] if isinstance(rhs, np.ndarray): rhs = rhs.ravel() return rhs @property def is_valid(self): """ return True if this is a valid field """ return self.lhs in self.queryables @property def is_in_table(self): """ return True if this is a valid column name for 
generation (e.g. an actual column in the table) """ return self.queryables.get(self.lhs) is not None @property def kind(self): """ the kind of my field """ return getattr(self.queryables.get(self.lhs), 'kind', None) @property def meta(self): """ the meta of my field """ return getattr(self.queryables.get(self.lhs), 'meta', None) @property def metadata(self): """ the metadata of my field """ return getattr(self.queryables.get(self.lhs), 'metadata', None) def generate(self, v): """ create and return the op string for this TermValue """ val = v.tostring(self.encoding) return "({lhs} {op} {val})".format(lhs=self.lhs, op=self.op, val=val) def convert_value(self, v): """ convert the expression that is in the term to something that is accepted by pytables """ def stringify(value): if self.encoding is not None: encoder = partial(pprint_thing_encoded, encoding=self.encoding) else: encoder = pprint_thing return encoder(value) kind = _ensure_decoded(self.kind) meta = _ensure_decoded(self.meta) if kind == 'datetime64' or kind == 'datetime': if isinstance(v, (int, float)): v = stringify(v) v = _ensure_decoded(v) v = Timestamp(v) if v.tz is not None: v = v.tz_convert('UTC') return TermValue(v, v.value, kind) elif kind == 'timedelta64' or kind == 'timedelta': v = Timedelta(v, unit='s').value return TermValue(int(v), v, kind) elif meta == 'category': metadata = com.values_from_object(self.metadata) result = metadata.searchsorted(v, side='left') # result returns 0 if v is first element or if v is not in metadata # check that metadata contains v if not result and v not in metadata: result = -1 return TermValue(result, result, 'integer') elif kind == 'integer': v = int(float(v)) return TermValue(v, v, kind) elif kind == 'float': v = float(v) return TermValue(v, v, kind) elif kind == 'bool': if isinstance(v, str): v = not v.strip().lower() in ['false', 'f', 'no', 'n', 'none', '0', '[]', '{}', ''] else: v = bool(v) return TermValue(v, v, kind) elif isinstance(v, str): # string quoting return TermValue(v, stringify(v), 'string') else: raise TypeError("Cannot compare {v} of type {typ} to {kind} column" .format(v=v, typ=type(v), kind=kind)) def convert_values(self): pass class FilterBinOp(BinOp): def __str__(self): return pprint_thing("[Filter : [{lhs}] -> [{op}]" .format(lhs=self.filter[0], op=self.filter[1])) def invert(self): """ invert the filter """ if self.filter is not None: f = list(self.filter) f[1] = self.generate_filter_op(invert=True) self.filter = tuple(f) return self def format(self): """ return the actual filter format """ return [self.filter] def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) rhs = self.conform(self.rhs) values = [TermValue(v, v, self.kind).value for v in rhs] if self.is_in_table: # if too many values to create the expression, use a filter instead if self.op in ['==', '!='] and len(values) > self._max_selectors: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) return self return None # equality conditions if self.op in ['==', '!=']: filter_op = self.generate_filter_op() self.filter = ( self.lhs, filter_op, pd.Index(values)) else: raise TypeError("passing a filterable condition to a non-table " "indexer [{slf}]".format(slf=self)) return self def generate_filter_op(self, invert=False): if (self.op == '!=' and not invert) or (self.op == '==' and invert): return lambda axis, vals: ~axis.isin(vals) else: return lambda axis, vals: axis.isin(vals) class JointFilterBinOp(FilterBinOp): 
def format(self): raise NotImplementedError("unable to collapse Joint Filters") def evaluate(self): return self class ConditionBinOp(BinOp): def __str__(self): return pprint_thing("[Condition : [{cond}]]" .format(cond=self.condition)) def invert(self): """ invert the condition """ # if self.condition is not None: # self.condition = "~(%s)" % self.condition # return self raise NotImplementedError("cannot use an invert condition when " "passing to numexpr") def format(self): """ return the actual ne format """ return self.condition def evaluate(self): if not self.is_valid: raise ValueError("query term is not valid [{slf}]" .format(slf=self)) # convert values if we are in the table if not self.is_in_table: return None rhs = self.conform(self.rhs) values = [self.convert_value(v) for v in rhs] # equality conditions if self.op in ['==', '!=']: # too many values to create the expression? if len(values) <= self._max_selectors: vs = [self.generate(v) for v in values] self.condition = "({cond})".format(cond=' | '.join(vs)) # use a filter after reading else: return None else: self.condition = self.generate(values[0]) return self class JointConditionBinOp(ConditionBinOp): def evaluate(self): self.condition = "({lhs} {op} {rhs})".format(lhs=self.lhs.condition, op=self.op, rhs=self.rhs.condition) return self class UnaryOp(ops.UnaryOp): def prune(self, klass): if self.op != '~': raise NotImplementedError("UnaryOp only support invert type ops") operand = self.operand operand = operand.prune(klass) if operand is not None: if issubclass(klass, ConditionBinOp): if operand.condition is not None: return operand.invert() elif issubclass(klass, FilterBinOp): if operand.filter is not None: return operand.invert() return None _op_classes = {'unary': UnaryOp} class ExprVisitor(BaseExprVisitor): const_type = Constant term_type = Term def __init__(self, env, engine, parser, **kwargs): super().__init__(env, engine, parser) for bin_op in self.binary_ops: bin_node = self.binary_op_nodes_map[bin_op] setattr(self, 'visit_{node}'.format(node=bin_node), lambda node, bin_op=bin_op: partial(BinOp, bin_op, **kwargs)) def visit_UnaryOp(self, node, **kwargs): if isinstance(node.op, (ast.Not, ast.Invert)): return UnaryOp('~', self.visit(node.operand)) elif isinstance(node.op, ast.USub): return self.const_type(-self.visit(node.operand).value, self.env) elif isinstance(node.op, ast.UAdd): raise NotImplementedError('Unary addition not supported') def visit_Index(self, node, **kwargs): return self.visit(node.value).value def visit_Assign(self, node, **kwargs): cmpr = ast.Compare(ops=[ast.Eq()], left=node.targets[0], comparators=[node.value]) return self.visit(cmpr) def visit_Subscript(self, node, **kwargs): # only allow simple subscripts value = self.visit(node.value) slobj = self.visit(node.slice) try: value = value.value except AttributeError: pass try: return self.const_type(value[slobj], self.env) except TypeError: raise ValueError("cannot subscript {value!r} with " "{slobj!r}".format(value=value, slobj=slobj)) def visit_Attribute(self, node, **kwargs): attr = node.attr value = node.value ctx = node.ctx.__class__ if ctx == ast.Load: # resolve the value resolved = self.visit(value) # try to get the value to see if we are another expression try: resolved = resolved.value except (AttributeError): pass try: return self.term_type(getattr(resolved, attr), self.env) except AttributeError: # something like datetime.datetime where scope is overridden if isinstance(value, ast.Name) and value.id == attr: return resolved raise 
ValueError("Invalid Attribute context {name}" .format(name=ctx.__name__)) def translate_In(self, op): return ast.Eq() if isinstance(op, ast.In) else op def _rewrite_membership_op(self, node, left, right): return self.visit(node.op), node.op, left, right def _validate_where(w): """ Validate that the where statement is of the right type. The type may either be String, Expr, or list-like of Exprs. Parameters ---------- w : String term expression, Expr, or list-like of Exprs. Returns ------- where : The original where clause if the check was successful. Raises ------ TypeError : An invalid data type was passed in for w (e.g. dict). """ if not (isinstance(w, (Expr, str)) or is_list_like(w)): raise TypeError("where must be passed as a string, Expr, " "or list-like of Exprs") return w class Expr(expr.Expr): """ hold a pytables like expression, comprised of possibly multiple 'terms' Parameters ---------- where : string term expression, Expr, or list-like of Exprs queryables : a "kinds" map (dict of column name -> kind), or None if column is non-indexable encoding : an encoding that will encode the query terms Returns ------- an Expr object Examples -------- 'index>=date' "columns=['A', 'D']" 'columns=A' 'columns==A' "~(columns=['A','B'])" 'index>df.index[3] & string="bar"' '(index>df.index[3] & index<=df.index[6]) | string="bar"' "ts>=Timestamp('2012-02-01')" "major_axis>=20130101" """ def __init__(self, where, queryables=None, encoding=None, scope_level=0): where = _validate_where(where) self.encoding = encoding self.condition = None self.filter = None self.terms = None self._visitor = None # capture the environment if needed local_dict = DeepChainMap() if isinstance(where, Expr): local_dict = where.env.scope where = where.expr elif isinstance(where, (list, tuple)): for idx, w in enumerate(where): if isinstance(w, Expr): local_dict = w.env.scope else: w = _validate_where(w) where[idx] = w where = ' & '.join(map('({})'.format, com.flatten(where))) # noqa self.expr = where self.env = Scope(scope_level + 1, local_dict=local_dict) if queryables is not None and isinstance(self.expr, str): self.env.queryables.update(queryables) self._visitor = ExprVisitor(self.env, queryables=queryables, parser='pytables', engine='pytables', encoding=encoding) self.terms = self.parse() def __str__(self): if self.terms is not None: return pprint_thing(self.terms) return pprint_thing(self.expr) def evaluate(self): """ create and return the numexpr condition and filter """ try: self.condition = self.terms.prune(ConditionBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid condition".format(expr=self.expr, slf=self)) try: self.filter = self.terms.prune(FilterBinOp) except AttributeError: raise ValueError("cannot process expression [{expr}], [{slf}] " "is not a valid filter".format(expr=self.expr, slf=self)) return self.condition, self.filter class TermValue: """ hold a term value the we use to construct a condition/filter """ def __init__(self, value, converted, kind): self.value = value self.converted = converted self.kind = kind def tostring(self, encoding): """ quote the string if not encoded else encode and return """ if self.kind == 'string': if encoding is not None: return self.converted return '"{converted}"'.format(converted=self.converted) elif self.kind == 'float': # python 2 str(float) is not always # round-trippable so use repr() return repr(self.converted) return self.converted def maybe_expression(s): """ loose checking if s is a pytables-acceptable 
expression """ if not isinstance(s, str): return False ops = ExprVisitor.binary_ops + ExprVisitor.unary_ops + ('=',) # make sure we have an op at least return any(op in s for op in ops)
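In practice these Expr/BinOp classes are driven by HDFStore queries: the ``where`` string is parsed by ``ExprVisitor`` into a numexpr condition and, when a term cannot be pushed into the table selector, a post-read filter. A minimal sketch, assuming PyTables is installed; the file name and data are illustrative:

import numpy as np
import pandas as pd

df = pd.DataFrame({"A": np.arange(10.0), "B": list("abcdefghij")},
                  index=pd.date_range("2019-01-01", periods=10))

with pd.HDFStore("demo.h5", mode="w") as store:
    # data_columns makes A and B individually queryable in where clauses.
    store.append("df", df, data_columns=["A", "B"])
    # Parsed into a condition on A and an equality set on B; list-valued
    # comparisons become OR'ed selectors (or a filter past _max_selectors).
    subset = store.select("df", where='A > 5 & B == ["g", "h"]')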
code_path: pandas/core/computation/pytables.py
import numpy as np import pytest import pandas as pd from pandas import Index, MultiIndex @pytest.fixture def idx(): # a MultiIndex used to test the general functionality of this object major_axis = Index(['foo', 'bar', 'baz', 'qux']) minor_axis = Index(['one', 'two']) major_codes = np.array([0, 0, 1, 2, 3, 3]) minor_codes = np.array([0, 1, 0, 1, 0, 1]) index_names = ['first', 'second'] mi = MultiIndex(levels=[major_axis, minor_axis], codes=[major_codes, minor_codes], names=index_names, verify_integrity=False) return mi @pytest.fixture def idx_dup(): # compare tests/indexes/multi/conftest.py major_axis = Index(['foo', 'bar', 'baz', 'qux']) minor_axis = Index(['one', 'two']) major_codes = np.array([0, 0, 1, 0, 1, 1]) minor_codes = np.array([0, 1, 0, 1, 0, 1]) index_names = ['first', 'second'] mi = MultiIndex(levels=[major_axis, minor_axis], codes=[major_codes, minor_codes], names=index_names, verify_integrity=False) return mi @pytest.fixture def index_names(): # names that match those in the idx fixture for testing equality of # names assigned to the idx return ['first', 'second'] @pytest.fixture def holder(): # the MultiIndex constructor used to check pickle compatibility return MultiIndex @pytest.fixture def compat_props(): # a MultiIndex must have these properties associated with it return ['shape', 'ndim', 'size'] @pytest.fixture def narrow_multi_index(): """ Return a MultiIndex that is narrower than the display (<80 characters). """ n = 1000 ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n)) dti = pd.date_range('2000-01-01', freq='s', periods=n * 2) return pd.MultiIndex.from_arrays([ci, ci.codes + 9, dti], names=['a', 'b', 'dti']) @pytest.fixture def wide_multi_index(): """ Return a MultiIndex that is wider than the display (>80 characters). """ n = 1000 ci = pd.CategoricalIndex(list('a' * n) + (['abc'] * n)) dti = pd.date_range('2000-01-01', freq='s', periods=n * 2) levels = [ci, ci.codes + 9, dti, dti, dti] names = ['a', 'b', 'dti_1', 'dti_2', 'dti_3'] return pd.MultiIndex.from_arrays(levels, names=names)
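A hypothetical test showing how pytest injects these fixtures by name; the test itself is illustrative and not part of the suite:

def test_idx_fixture_roundtrip(idx, index_names, compat_props):
    # idx carries the names supplied by the conftest above.
    assert list(idx.names) == index_names
    # six code pairs were defined, so six entries.
    assert len(idx) == 6
    # the compat properties must exist on any MultiIndex.
    for prop in compat_props:
        assert hasattr(idx, prop)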
code_path: pandas/tests/indexes/multi/conftest.py
import numpy as np import pytest from pandas._libs.tslib import iNaT from pandas.core.dtypes.dtypes import CategoricalDtype import pandas as pd from pandas import ( CategoricalIndex, DatetimeIndex, Index, Int64Index, IntervalIndex, MultiIndex, PeriodIndex, RangeIndex, Series, TimedeltaIndex, UInt64Index, isna) from pandas.core.indexes.base import InvalidIndexError from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin import pandas.util.testing as tm class Base: """ base class for index sub-class tests """ _holder = None _compat_props = ['shape', 'ndim', 'size', 'nbytes'] def setup_indices(self): for name, idx in self.indices.items(): setattr(self, name, idx) def test_pickle_compat_construction(self): # need an object to create with msg = (r"Index\(\.\.\.\) must be called with a collection of some" r" kind, None was passed|" r"__new__\(\) missing 1 required positional argument: 'data'|" r"__new__\(\) takes at least 2 arguments \(1 given\)") with pytest.raises(TypeError, match=msg): self._holder() def test_to_series(self): # assert that we are creating a copy of the index idx = self.create_index() s = idx.to_series() assert s.values is not idx.values assert s.index is not idx assert s.name == idx.name def test_to_series_with_arguments(self): # GH18699 # index kwarg idx = self.create_index() s = idx.to_series(index=idx) assert s.values is not idx.values assert s.index is idx assert s.name == idx.name # name kwarg idx = self.create_index() s = idx.to_series(name='__test') assert s.values is not idx.values assert s.index is not idx assert s.name != idx.name @pytest.mark.parametrize("name", [None, "new_name"]) def test_to_frame(self, name): # see GH-15230, GH-22580 idx = self.create_index() if name: idx_name = name else: idx_name = idx.name or 0 df = idx.to_frame(name=idx_name) assert df.index is idx assert len(df.columns) == 1 assert df.columns[0] == idx_name assert df[idx_name].values is not idx.values df = idx.to_frame(index=False, name=idx_name) assert df.index is not idx def test_to_frame_datetime_tz(self): # GH 25809 idx = pd.date_range(start='2019-01-01', end='2019-01-30', freq='D') idx = idx.tz_localize('UTC') result = idx.to_frame() expected = pd.DataFrame(idx, index=idx) tm.assert_frame_equal(result, expected) def test_shift(self): # GH8083 test the base class for shift idx = self.create_index() msg = "Not supported for type {}".format(type(idx).__name__) with pytest.raises(NotImplementedError, match=msg): idx.shift(1) with pytest.raises(NotImplementedError, match=msg): idx.shift(1, 2) def test_create_index_existing_name(self): # GH11193, when an existing index is passed, and a new name is not # specified, the new index should inherit the previous object name expected = self.create_index() if not isinstance(expected, MultiIndex): expected.name = 'foo' result = pd.Index(expected) tm.assert_index_equal(result, expected) result = pd.Index(expected, name='bar') expected.name = 'bar' tm.assert_index_equal(result, expected) else: expected.names = ['foo', 'bar'] result = pd.Index(expected) tm.assert_index_equal( result, Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')], dtype='object'), names=['foo', 'bar'])) result = pd.Index(expected, names=['A', 'B']) tm.assert_index_equal( result, Index(Index([('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')], dtype='object'), names=['A', 'B'])) def test_numeric_compat(self): idx = self.create_index() with pytest.raises(TypeError, 
match="cannot perform __mul__"): idx * 1 with pytest.raises(TypeError, match="cannot perform __rmul__"): 1 * idx div_err = "cannot perform __truediv__" with pytest.raises(TypeError, match=div_err): idx / 1 div_err = div_err.replace(' __', ' __r') with pytest.raises(TypeError, match=div_err): 1 / idx with pytest.raises(TypeError, match="cannot perform __floordiv__"): idx // 1 with pytest.raises(TypeError, match="cannot perform __rfloordiv__"): 1 // idx def test_logical_compat(self): idx = self.create_index() with pytest.raises(TypeError, match='cannot perform all'): idx.all() with pytest.raises(TypeError, match='cannot perform any'): idx.any() def test_boolean_context_compat(self): # boolean context compat idx = self.create_index() with pytest.raises(ValueError, match='The truth value of a'): if idx: pass def test_reindex_base(self): idx = self.create_index() expected = np.arange(idx.size, dtype=np.intp) actual = idx.get_indexer(idx) tm.assert_numpy_array_equal(expected, actual) with pytest.raises(ValueError, match='Invalid fill method'): idx.get_indexer(idx, method='invalid') def test_get_indexer_consistency(self): # See GH 16819 for name, index in self.indices.items(): if isinstance(index, IntervalIndex): continue if index.is_unique or isinstance(index, CategoricalIndex): indexer = index.get_indexer(index[0:2]) assert isinstance(indexer, np.ndarray) assert indexer.dtype == np.intp else: e = "Reindexing only valid with uniquely valued Index objects" with pytest.raises(InvalidIndexError, match=e): index.get_indexer(index[0:2]) indexer, _ = index.get_indexer_non_unique(index[0:2]) assert isinstance(indexer, np.ndarray) assert indexer.dtype == np.intp def test_ndarray_compat_properties(self): idx = self.create_index() assert idx.T.equals(idx) assert idx.transpose().equals(idx) values = idx.values for prop in self._compat_props: assert getattr(idx, prop) == getattr(values, prop) # test for validity idx.nbytes idx.values.nbytes def test_repr_roundtrip(self): idx = self.create_index() tm.assert_index_equal(eval(repr(idx)), idx) def test_str(self): # test the string repr idx = self.create_index() idx.name = 'foo' assert "'foo'" in str(idx) assert idx.__class__.__name__ in str(idx) def test_repr_max_seq_item_setting(self): # GH10182 idx = self.create_index() idx = idx.repeat(50) with pd.option_context("display.max_seq_items", None): repr(idx) assert '...' not in str(idx) def test_copy_name(self): # gh-12309: Check that the "name" argument # passed at initialization is honored. for name, index in self.indices.items(): if isinstance(index, MultiIndex): continue first = index.__class__(index, copy=True, name='mario') second = first.__class__(first, copy=False) # Even though "copy=False", we want a new object. assert first is not second # Not using tm.assert_index_equal() since names differ. 
assert index.equals(first) assert first.name == 'mario' assert second.name == 'mario' s1 = Series(2, index=first) s2 = Series(3, index=second[:-1]) if not isinstance(index, CategoricalIndex): # See gh-13365 s3 = s1 * s2 assert s3.index.name == 'mario' def test_ensure_copied_data(self): # Check the "copy" argument of each Index.__new__ is honoured # GH12309 for name, index in self.indices.items(): init_kwargs = {} if isinstance(index, PeriodIndex): # Needs "freq" specification: init_kwargs['freq'] = index.freq elif isinstance(index, (RangeIndex, MultiIndex, CategoricalIndex)): # RangeIndex cannot be initialized from data # MultiIndex and CategoricalIndex are tested separately continue index_type = index.__class__ result = index_type(index.values, copy=True, **init_kwargs) tm.assert_index_equal(index, result) tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='copy') if isinstance(index, PeriodIndex): # .values an object array of Period, thus copied result = index_type(ordinal=index.asi8, copy=False, **init_kwargs) tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='same') elif isinstance(index, IntervalIndex): # checked in test_interval.py pass else: result = index_type(index.values, copy=False, **init_kwargs) tm.assert_numpy_array_equal(index.values, result.values, check_same='same') tm.assert_numpy_array_equal(index._ndarray_values, result._ndarray_values, check_same='same') def test_memory_usage(self): for name, index in self.indices.items(): result = index.memory_usage() if len(index): index.get_loc(index[0]) result2 = index.memory_usage() result3 = index.memory_usage(deep=True) # RangeIndex, IntervalIndex # don't have engines if not isinstance(index, (RangeIndex, IntervalIndex)): assert result2 > result if index.inferred_type == 'object': assert result3 > result2 else: # we report 0 for no-length assert result == 0 def test_argsort(self): for k, ind in self.indices.items(): # separately tested if k in ['catIndex']: continue result = ind.argsort() expected = np.array(ind).argsort() tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_numpy_argsort(self): for k, ind in self.indices.items(): result = np.argsort(ind) expected = ind.argsort() tm.assert_numpy_array_equal(result, expected) # these are the only two types that perform # pandas compatibility input validation - the # rest already perform separate (or no) such # validation via their 'values' attribute as # defined in pandas.core.indexes/base.py - they # cannot be changed at the moment due to # backwards compatibility concerns if isinstance(type(ind), (CategoricalIndex, RangeIndex)): msg = "the 'axis' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, axis=1) msg = "the 'kind' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, kind='mergesort') msg = "the 'order' parameter is not supported" with pytest.raises(ValueError, match=msg): np.argsort(ind, order=('a', 'b')) def test_take(self): indexer = [4, 3, 0, 2] for k, ind in self.indices.items(): # separate if k in ['boolIndex', 'tuples', 'empty']: continue result = ind.take(indexer) expected = ind[indexer] assert result.equals(expected) if not isinstance(ind, (DatetimeIndex, PeriodIndex, TimedeltaIndex)): # GH 10791 with pytest.raises(AttributeError): ind.freq def test_take_invalid_kwargs(self): idx = self.create_index() indices = [1, 2] msg = r"take\(\) got an unexpected keyword argument 'foo'" with 
pytest.raises(TypeError, match=msg): idx.take(indices, foo=2) msg = "the 'out' parameter is not supported" with pytest.raises(ValueError, match=msg): idx.take(indices, out=indices) msg = "the 'mode' parameter is not supported" with pytest.raises(ValueError, match=msg): idx.take(indices, mode='clip') def test_repeat(self): rep = 2 i = self.create_index() expected = pd.Index(i.values.repeat(rep), name=i.name) tm.assert_index_equal(i.repeat(rep), expected) i = self.create_index() rep = np.arange(len(i)) expected = pd.Index(i.values.repeat(rep), name=i.name) tm.assert_index_equal(i.repeat(rep), expected) def test_numpy_repeat(self): rep = 2 i = self.create_index() expected = i.repeat(rep) tm.assert_index_equal(np.repeat(i, rep), expected) msg = "the 'axis' parameter is not supported" with pytest.raises(ValueError, match=msg): np.repeat(i, rep, axis=0) @pytest.mark.parametrize('klass', [list, tuple, np.array, Series]) def test_where(self, klass): i = self.create_index() cond = [True] * len(i) result = i.where(klass(cond)) expected = i tm.assert_index_equal(result, expected) cond = [False] + [True] * len(i[1:]) expected = pd.Index([i._na_value] + i[1:].tolist(), dtype=i.dtype) result = i.where(klass(cond)) tm.assert_index_equal(result, expected) @pytest.mark.parametrize("case", [0.5, "xxx"]) @pytest.mark.parametrize("method", ["intersection", "union", "difference", "symmetric_difference"]) def test_set_ops_error_cases(self, case, method): for name, idx in self.indices.items(): # non-iterable input msg = "Input must be Index or array-like" with pytest.raises(TypeError, match=msg): getattr(idx, method)(case) def test_intersection_base(self): for name, idx in self.indices.items(): first = idx[:5] second = idx[:3] intersect = first.intersection(second) if isinstance(idx, CategoricalIndex): pass else: assert tm.equalContents(intersect, second) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.intersection(case) assert tm.equalContents(result, second) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.intersection([1, 2, 3]) def test_union_base(self): for name, idx in self.indices.items(): first = idx[3:] second = idx[:5] everything = idx union = first.union(second) assert tm.equalContents(union, everything) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.union(case) assert tm.equalContents(result, everything) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.union([1, 2, 3]) @pytest.mark.parametrize("sort", [None, False]) def test_difference_base(self, sort): for name, idx in self.indices.items(): first = idx[2:] second = idx[:4] answer = idx[4:] result = first.difference(second, sort) if isinstance(idx, CategoricalIndex): pass else: assert tm.equalContents(result, answer) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass elif isinstance(idx, (DatetimeIndex, TimedeltaIndex)): assert result.__class__ == answer.__class__ tm.assert_numpy_array_equal(result.sort_values().asi8, answer.sort_values().asi8) else: result = first.difference(case, sort) assert tm.equalContents(result, answer) if isinstance(idx, MultiIndex): msg = 
"other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.difference([1, 2, 3], sort) def test_symmetric_difference(self): for name, idx in self.indices.items(): first = idx[1:] second = idx[:-1] if isinstance(idx, CategoricalIndex): pass else: answer = idx[[0, -1]] result = first.symmetric_difference(second) assert tm.equalContents(result, answer) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if isinstance(idx, CategoricalIndex): pass else: result = first.symmetric_difference(case) assert tm.equalContents(result, answer) if isinstance(idx, MultiIndex): msg = "other must be a MultiIndex or a list of tuples" with pytest.raises(TypeError, match=msg): first.symmetric_difference([1, 2, 3]) def test_insert_base(self): for name, idx in self.indices.items(): result = idx[1:4] if not len(idx): continue # test 0th element assert idx[0:4].equals(result.insert(0, idx[0])) def test_delete_base(self): for name, idx in self.indices.items(): if not len(idx): continue if isinstance(idx, RangeIndex): # tested in class continue expected = idx[1:] result = idx.delete(0) assert result.equals(expected) assert result.name == expected.name expected = idx[:-1] result = idx.delete(-1) assert result.equals(expected) assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version idx.delete(len(idx)) def test_equals(self): for name, idx in self.indices.items(): assert idx.equals(idx) assert idx.equals(idx.copy()) assert idx.equals(idx.astype(object)) assert not idx.equals(list(idx)) assert not idx.equals(np.array(idx)) # Cannot pass in non-int64 dtype to RangeIndex if not isinstance(idx, RangeIndex): same_values = Index(idx, dtype=object) assert idx.equals(same_values) assert same_values.equals(idx) if idx.nlevels == 1: # do not test MultiIndex assert not idx.equals(pd.Series(idx)) def test_equals_op(self): # GH9947, GH10637 index_a = self.create_index() if isinstance(index_a, PeriodIndex): pytest.skip('Skip check for PeriodIndex') n = len(index_a) index_b = index_a[0:-1] index_c = index_a[0:-1].append(index_a[-2:-1]) index_d = index_a[0:1] msg = "Lengths must match|could not be broadcast" with pytest.raises(ValueError, match=msg): index_a == index_b expected1 = np.array([True] * n) expected2 = np.array([True] * (n - 1) + [False]) tm.assert_numpy_array_equal(index_a == index_a, expected1) tm.assert_numpy_array_equal(index_a == index_c, expected2) # test comparisons with numpy arrays array_a = np.array(index_a) array_b = np.array(index_a[0:-1]) array_c = np.array(index_a[0:-1].append(index_a[-2:-1])) array_d = np.array(index_a[0:1]) with pytest.raises(ValueError, match=msg): index_a == array_b tm.assert_numpy_array_equal(index_a == array_a, expected1) tm.assert_numpy_array_equal(index_a == array_c, expected2) # test comparisons with Series series_a = Series(array_a) series_b = Series(array_b) series_c = Series(array_c) series_d = Series(array_d) with pytest.raises(ValueError, match=msg): index_a == series_b tm.assert_numpy_array_equal(index_a == series_a, expected1) tm.assert_numpy_array_equal(index_a == series_c, expected2) # cases where length is 1 for one of them with pytest.raises(ValueError, match="Lengths must match"): index_a == index_d with pytest.raises(ValueError, match="Lengths must match"): index_a == series_d with pytest.raises(ValueError, match="Lengths must match"): index_a == array_d msg = "Can only compare identically-labeled Series objects" with 
pytest.raises(ValueError, match=msg): series_a == series_d with pytest.raises(ValueError, match="Lengths must match"): series_a == array_d # comparing with a scalar should broadcast; note that we are excluding # MultiIndex because in this case each item in the index is a tuple of # length 2, and therefore is considered an array of length 2 in the # comparison instead of a scalar if not isinstance(index_a, MultiIndex): expected3 = np.array([False] * (len(index_a) - 2) + [True, False]) # assuming the 2nd to last item is unique in the data item = index_a[-2] tm.assert_numpy_array_equal(index_a == item, expected3) tm.assert_series_equal(series_a == item, Series(expected3)) def test_hasnans_isnans(self): # GH 11343, added tests for hasnans / isnans for name, index in self.indices.items(): if isinstance(index, MultiIndex): pass else: idx = index.copy() # cases in indices doesn't include NaN expected = np.array([False] * len(idx), dtype=bool) tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is False idx = index.copy() values = np.asarray(idx.values) if len(index) == 0: continue elif isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is True def test_fillna(self): # GH 11343 for name, index in self.indices.items(): if len(index) == 0: pass elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with pytest.raises(NotImplementedError, match=msg): idx.fillna(idx[0]) else: idx = index.copy() result = idx.fillna(idx[0]) tm.assert_index_equal(result, idx) assert result is not idx msg = "'value' must be a scalar, passed: " with pytest.raises(TypeError, match=msg): idx.fillna([idx[0]]) idx = index.copy() values = np.asarray(idx.values) if isinstance(index, DatetimeIndexOpsMixin): values[1] = iNaT elif isinstance(index, (Int64Index, UInt64Index)): continue else: values[1] = np.nan if isinstance(index, PeriodIndex): idx = index.__class__(values, freq=index.freq) else: idx = index.__class__(values) expected = np.array([False] * len(idx), dtype=bool) expected[1] = True tm.assert_numpy_array_equal(idx._isnan, expected) assert idx.hasnans is True def test_nulls(self): # this is really a smoke test for the methods # as these are adequately tested for function elsewhere for name, index in self.indices.items(): if len(index) == 0: tm.assert_numpy_array_equal( index.isna(), np.array([], dtype=bool)) elif isinstance(index, MultiIndex): idx = index.copy() msg = "isna is not defined for MultiIndex" with pytest.raises(NotImplementedError, match=msg): idx.isna() else: if not index.hasnans: tm.assert_numpy_array_equal( index.isna(), np.zeros(len(index), dtype=bool)) tm.assert_numpy_array_equal( index.notna(), np.ones(len(index), dtype=bool)) else: result = isna(index) tm.assert_numpy_array_equal(index.isna(), result) tm.assert_numpy_array_equal(index.notna(), ~result) def test_empty(self): # GH 15270 index = self.create_index() assert not index.empty assert index[:0].empty def test_join_self_unique(self, join_type): index = self.create_index() if index.is_unique: joined = index.join(index, how=join_type) assert (index == joined).all() def test_map(self): # callable index = self.create_index() # we don't infer 
UInt64 if isinstance(index, pd.UInt64Index): expected = index.astype('int64') else: expected = index result = index.map(lambda x: x) tm.assert_index_equal(result, expected) @pytest.mark.parametrize( "mapper", [ lambda values, index: {i: e for e, i in zip(values, index)}, lambda values, index: pd.Series(values, index)]) def test_map_dictlike(self, mapper): index = self.create_index() if isinstance(index, (pd.CategoricalIndex, pd.IntervalIndex)): pytest.skip("skipping tests for {}".format(type(index))) identity = mapper(index.values, index) # we don't infer to UInt64 for a dict if isinstance(index, pd.UInt64Index) and isinstance(identity, dict): expected = index.astype('int64') else: expected = index result = index.map(identity) tm.assert_index_equal(result, expected) # empty mappable expected = pd.Index([np.nan] * len(index)) result = index.map(mapper(expected, index)) tm.assert_index_equal(result, expected) def test_putmask_with_wrong_mask(self): # GH18368 index = self.create_index() with pytest.raises(ValueError): index.putmask(np.ones(len(index) + 1, np.bool), 1) with pytest.raises(ValueError): index.putmask(np.ones(len(index) - 1, np.bool), 1) with pytest.raises(ValueError): index.putmask('foo', 1) @pytest.mark.parametrize('copy', [True, False]) @pytest.mark.parametrize('name', [None, 'foo']) @pytest.mark.parametrize('ordered', [True, False]) def test_astype_category(self, copy, name, ordered): # GH 18630 index = self.create_index() if name: index = index.rename(name) # standard categories dtype = CategoricalDtype(ordered=ordered) result = index.astype(dtype, copy=copy) expected = CategoricalIndex(index.values, name=name, ordered=ordered) tm.assert_index_equal(result, expected) # non-standard categories dtype = CategoricalDtype(index.unique().tolist()[:-1], ordered) result = index.astype(dtype, copy=copy) expected = CategoricalIndex(index.values, name=name, dtype=dtype) tm.assert_index_equal(result, expected) if ordered is False: # dtype='category' defaults to ordered=False, so only test once result = index.astype('category', copy=copy) expected = CategoricalIndex(index.values, name=name) tm.assert_index_equal(result, expected) def test_is_unique(self): # initialize a unique index index = self.create_index().drop_duplicates() assert index.is_unique is True # empty index should be unique index_empty = index[:0] assert index_empty.is_unique is True # test basic dupes index_dup = index.insert(0, index[0]) assert index_dup.is_unique is False # single NA should be unique index_na = index.insert(0, np.nan) assert index_na.is_unique is True # multiple NA should not be unique index_na_dup = index_na.insert(0, np.nan) assert index_na_dup.is_unique is False
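# A minimal standalone sketch of the NA-uniqueness rules exercised by
# test_is_unique above (illustrative only; assumes just pandas and numpy):
import numpy as np
import pandas as pd

idx = pd.Index([1, 2, 3])
assert idx.is_unique is True
assert idx.insert(0, idx[0]).is_unique is False  # a duplicated label
assert idx.insert(0, np.nan).is_unique is True   # a single NA stays unique
assert idx.insert(0, np.nan).insert(0, np.nan).is_unique is False  # two NAs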
import pytest

import pandas as pd
from pandas import TimedeltaIndex


class TestTimedeltaIndexRendering:
    @pytest.mark.parametrize('method', ['__repr__', '__str__'])
    def test_representation(self, method):
        idx1 = TimedeltaIndex([], freq='D')
        idx2 = TimedeltaIndex(['1 days'], freq='D')
        idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D')
        idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D')
        idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days'])

        exp1 = """TimedeltaIndex([], dtype='timedelta64[ns]', freq='D')"""

        exp2 = ("TimedeltaIndex(['1 days'], dtype='timedelta64[ns]', "
                "freq='D')")

        exp3 = ("TimedeltaIndex(['1 days', '2 days'], "
                "dtype='timedelta64[ns]', freq='D')")

        exp4 = ("TimedeltaIndex(['1 days', '2 days', '3 days'], "
                "dtype='timedelta64[ns]', freq='D')")

        exp5 = ("TimedeltaIndex(['1 days 00:00:01', '2 days 00:00:00', "
                "'3 days 00:00:00'], dtype='timedelta64[ns]', freq=None)")

        with pd.option_context('display.width', 300):
            for idx, expected in zip([idx1, idx2, idx3, idx4, idx5],
                                     [exp1, exp2, exp3, exp4, exp5]):
                result = getattr(idx, method)()
                assert result == expected

    def test_representation_to_series(self):
        idx1 = TimedeltaIndex([], freq='D')
        idx2 = TimedeltaIndex(['1 days'], freq='D')
        idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D')
        idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D')
        idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days'])

        exp1 = """Series([], dtype: timedelta64[ns])"""

        exp2 = ("0   1 days\n"
                "dtype: timedelta64[ns]")

        exp3 = ("0   1 days\n"
                "1   2 days\n"
                "dtype: timedelta64[ns]")

        exp4 = ("0   1 days\n"
                "1   2 days\n"
                "2   3 days\n"
                "dtype: timedelta64[ns]")

        exp5 = ("0   1 days 00:00:01\n"
                "1   2 days 00:00:00\n"
                "2   3 days 00:00:00\n"
                "dtype: timedelta64[ns]")

        with pd.option_context('display.width', 300):
            for idx, expected in zip([idx1, idx2, idx3, idx4, idx5],
                                     [exp1, exp2, exp3, exp4, exp5]):
                result = repr(pd.Series(idx))
                assert result == expected

    def test_summary(self):
        # GH#9116
        idx1 = TimedeltaIndex([], freq='D')
        idx2 = TimedeltaIndex(['1 days'], freq='D')
        idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D')
        idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D')
        idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days'])

        exp1 = ("TimedeltaIndex: 0 entries\n"
                "Freq: D")

        exp2 = ("TimedeltaIndex: 1 entries, 1 days to 1 days\n"
                "Freq: D")

        exp3 = ("TimedeltaIndex: 2 entries, 1 days to 2 days\n"
                "Freq: D")

        exp4 = ("TimedeltaIndex: 3 entries, 1 days to 3 days\n"
                "Freq: D")

        exp5 = ("TimedeltaIndex: 3 entries, 1 days 00:00:01 to 3 days "
                "00:00:00")

        for idx, expected in zip([idx1, idx2, idx3, idx4, idx5],
                                 [exp1, exp2, exp3, exp4, exp5]):
            result = idx._summary()
            assert result == expected
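# The expected repr strings above can be reproduced directly; a small
# sketch mirroring the test's display settings (illustrative only):
import pandas as pd

tdi = pd.TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D')
with pd.option_context('display.width', 300):
    assert repr(tdi) == ("TimedeltaIndex(['1 days', '2 days', '3 days'], "
                         "dtype='timedelta64[ns]', freq='D')")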
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_formats.py
pandas/tests/indexes/common.py
import numpy as np

from pandas._libs import algos as libalgos, index as libindex

import pandas.util.testing as tm


class TestNumericEngine:
    def test_is_monotonic(self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype
        num = 1000
        arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype)

        # monotonic increasing
        engine = engine_type(lambda: arr, len(arr))
        assert engine.is_monotonic_increasing is True
        assert engine.is_monotonic_decreasing is False

        # monotonic decreasing
        engine = engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is True

        # neither monotonic increasing or decreasing
        arr = np.array([1] * num + [2] * num + [1] * num, dtype=dtype)
        engine = engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is False

    def test_is_unique(self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        # unique
        arr = np.array([1, 3, 2], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.is_unique is True

        # not unique
        arr = np.array([1, 2, 1], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.is_unique is False

    def test_get_loc(self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        # unique
        arr = np.array([1, 2, 3], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.get_loc(2) == 1

        # monotonic
        num = 1000
        arr = np.array([1] * num + [2] * num + [3] * num, dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        assert engine.get_loc(2) == slice(1000, 2000)

        # not monotonic
        arr = np.array([1, 2, 3] * num, dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))
        expected = np.array([False, True, False] * num, dtype=bool)
        result = engine.get_loc(2)
        assert (result == expected).all()

    def test_get_backfill_indexer(
            self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        arr = np.array([1, 5, 10], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))

        new = np.arange(12, dtype=dtype)
        result = engine.get_backfill_indexer(new)

        expected = libalgos.backfill(arr, new)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_pad_indexer(
            self, numeric_indexing_engine_type_and_dtype):
        engine_type, dtype = numeric_indexing_engine_type_and_dtype

        arr = np.array([1, 5, 10], dtype=dtype)
        engine = engine_type(lambda: arr, len(arr))

        new = np.arange(12, dtype=dtype)
        result = engine.get_pad_indexer(new)

        expected = libalgos.pad(arr, new)
        tm.assert_numpy_array_equal(result, expected)


class TestObjectEngine:
    engine_type = libindex.ObjectEngine
    dtype = np.object_
    values = list('abc')

    def test_is_monotonic(self):
        num = 1000
        arr = np.array(['a'] * num + ['a'] * num + ['c'] * num,
                       dtype=self.dtype)

        # monotonic increasing
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.is_monotonic_increasing is True
        assert engine.is_monotonic_decreasing is False

        # monotonic decreasing
        engine = self.engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is True

        # neither monotonic increasing or decreasing
        arr = np.array(['a'] * num + ['b'] * num + ['a'] * num,
                       dtype=self.dtype)
        engine = self.engine_type(lambda: arr[::-1], len(arr))
        assert engine.is_monotonic_increasing is False
        assert engine.is_monotonic_decreasing is False

    def test_is_unique(self):
        # unique
        arr = np.array(self.values, dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.is_unique is True

        # not unique
        arr = np.array(['a', 'b', 'a'], dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.is_unique is False

    def test_get_loc(self):
        # unique
        arr = np.array(self.values, dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.get_loc('b') == 1

        # monotonic
        num = 1000
        arr = np.array(['a'] * num + ['b'] * num + ['c'] * num,
                       dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        assert engine.get_loc('b') == slice(1000, 2000)

        # not monotonic
        arr = np.array(self.values * num, dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))
        expected = np.array([False, True, False] * num, dtype=bool)
        result = engine.get_loc('b')
        assert (result == expected).all()

    def test_get_backfill_indexer(self):
        arr = np.array(['a', 'e', 'j'], dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))

        new = np.array(list('abcdefghij'), dtype=self.dtype)
        result = engine.get_backfill_indexer(new)

        expected = libalgos.backfill["object"](arr, new)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_pad_indexer(self):
        arr = np.array(['a', 'e', 'j'], dtype=self.dtype)
        engine = self.engine_type(lambda: arr, len(arr))

        new = np.array(list('abcdefghij'), dtype=self.dtype)
        result = engine.get_pad_indexer(new)

        expected = libalgos.pad["object"](arr, new)
        tm.assert_numpy_array_equal(result, expected)
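# The engine construction pattern used throughout these tests (a callable
# returning the values, plus a length) can be tried against ObjectEngine
# directly. A minimal sketch; note pandas._libs.index is private API:
import numpy as np
from pandas._libs import index as libindex

arr = np.array(['a', 'b', 'c'], dtype=object)
engine = libindex.ObjectEngine(lambda: arr, len(arr))
assert engine.is_unique
assert engine.is_monotonic_increasing
assert engine.get_loc('b') == 1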
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_formats.py
pandas/tests/indexing/test_indexing_engines.py
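# A short sketch of the result-name rule documented by get_op_result_name
# and _maybe_match_name in the module below (illustrative only):
import pandas as pd

a = pd.Series([1, 2], name='x')
b = pd.Series([3, 4], name='x')
c = pd.Series([3, 4], name='y')
assert (a + b).name == 'x'   # matching names are propagated
assert (a + c).name is None  # mismatched names fall back to None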
""" Arithmetic operations for PandasObjects This is not a public API. """ import datetime import operator import textwrap from typing import Dict, Optional import warnings import numpy as np from pandas._libs import algos as libalgos, lib, ops as libops from pandas.errors import NullFrequencyError from pandas.util._decorators import Appender from pandas.core.dtypes.cast import ( construct_1d_object_array_from_listlike, find_common_type, maybe_upcast_putmask) from pandas.core.dtypes.common import ( ensure_object, is_bool_dtype, is_categorical_dtype, is_datetime64_dtype, is_datetime64tz_dtype, is_datetimelike_v_numeric, is_extension_array_dtype, is_integer_dtype, is_list_like, is_object_dtype, is_period_dtype, is_scalar, is_timedelta64_dtype, needs_i8_conversion) from pandas.core.dtypes.generic import ( ABCDataFrame, ABCIndex, ABCIndexClass, ABCSeries, ABCSparseArray, ABCSparseSeries) from pandas.core.dtypes.missing import isna, notna import pandas as pd import pandas.core.common as com import pandas.core.missing as missing # ----------------------------------------------------------------------------- # Ops Wrapping Utilities def get_op_result_name(left, right): """ Find the appropriate name to pin to an operation result. This result should always be either an Index or a Series. Parameters ---------- left : {Series, Index} right : object Returns ------- name : object Usually a string """ # `left` is always a pd.Series when called from within ops if isinstance(right, (ABCSeries, pd.Index)): name = _maybe_match_name(left, right) else: name = left.name return name def _maybe_match_name(a, b): """ Try to find a name to attach to the result of an operation between a and b. If only one of these has a `name` attribute, return that name. Otherwise return a consensus name if they match of None if they have different names. Parameters ---------- a : object b : object Returns ------- name : str or None See Also -------- pandas.core.common.consensus_name_attr """ a_has = hasattr(a, 'name') b_has = hasattr(b, 'name') if a_has and b_has: if a.name == b.name: return a.name else: # TODO: what if they both have np.nan for their names? return None elif a_has: return a.name elif b_has: return b.name return None def maybe_upcast_for_op(obj): """ Cast non-pandas objects to pandas types to unify behavior of arithmetic and comparison operations. Parameters ---------- obj: object Returns ------- out : object Notes ----- Be careful to call this *after* determining the `name` attribute to be attached to the result of the arithmetic operation. """ if type(obj) is datetime.timedelta: # GH#22390 cast up to Timedelta to rely on Timedelta # implementation; otherwise operation against numeric-dtype # raises TypeError return pd.Timedelta(obj) elif isinstance(obj, np.timedelta64) and not isna(obj): # In particular non-nanosecond timedelta64 needs to be cast to # nanoseconds, or else we get undesired behavior like # np.timedelta64(3, 'D') / 2 == np.timedelta64(1, 'D') # The isna check is to avoid casting timedelta64("NaT"), which would # return NaT and incorrectly be treated as a datetime-NaT. 
return pd.Timedelta(obj) elif isinstance(obj, np.ndarray) and is_timedelta64_dtype(obj): # GH#22390 Unfortunately we need to special-case right-hand # timedelta64 dtypes because numpy casts integer dtypes to # timedelta64 when operating with timedelta64 return pd.TimedeltaIndex(obj) return obj # ----------------------------------------------------------------------------- # Reversed Operations not available in the stdlib operator module. # Defining these instead of using lambdas allows us to reference them by name. def radd(left, right): return right + left def rsub(left, right): return right - left def rmul(left, right): return right * left def rdiv(left, right): return right / left def rtruediv(left, right): return right / left def rfloordiv(left, right): return right // left def rmod(left, right): # check if right is a string as % is the string # formatting operation; this is a TypeError # otherwise perform the op if isinstance(right, str): raise TypeError("{typ} cannot perform the operation mod".format( typ=type(left).__name__)) return right % left def rdivmod(left, right): return divmod(right, left) def rpow(left, right): return right ** left def rand_(left, right): return operator.and_(right, left) def ror_(left, right): return operator.or_(right, left) def rxor(left, right): return operator.xor(right, left) # ----------------------------------------------------------------------------- def make_invalid_op(name): """ Return a binary method that always raises a TypeError. Parameters ---------- name : str Returns ------- invalid_op : function """ def invalid_op(self, other=None): raise TypeError("cannot perform {name} with this index type: " "{typ}".format(name=name, typ=type(self).__name__)) invalid_op.__name__ = name return invalid_op def _gen_eval_kwargs(name): """ Find the keyword arguments to pass to numexpr for the given operation. Parameters ---------- name : str Returns ------- eval_kwargs : dict Examples -------- >>> _gen_eval_kwargs("__add__") {} >>> _gen_eval_kwargs("rtruediv") {'reversed': True, 'truediv': True} """ kwargs = {} # Series appear to only pass __add__, __radd__, ... # but DataFrame gets both these dunder names _and_ non-dunder names # add, radd, ... name = name.replace('__', '') if name.startswith('r'): if name not in ['radd', 'rand', 'ror', 'rxor']: # Exclude commutative operations kwargs['reversed'] = True if name in ['truediv', 'rtruediv']: kwargs['truediv'] = True if name in ['ne']: kwargs['masker'] = True return kwargs def _gen_fill_zeros(name): """ Find the appropriate fill value to use when filling in undefined values in the results of the given operation caused by operating on (generally dividing by) zero. Parameters ---------- name : str Returns ------- fill_value : {None, np.nan, np.inf} """ name = name.strip('__') if 'div' in name: # truediv, floordiv, div, and reversed variants fill_value = np.inf elif 'mod' in name: # mod, rmod fill_value = np.nan else: fill_value = None return fill_value def _get_frame_op_default_axis(name): """ Only DataFrame cares about default_axis, specifically: special methods have default_axis=None and flex methods have default_axis='columns'. Parameters ---------- name : str Returns ------- default_axis: str or None """ if name.replace('__r', '__') in ['__and__', '__or__', '__xor__']: # bool methods return 'columns' elif name.startswith('__'): # __add__, __mul__, ... return None else: # add, mul, ... return 'columns' def _get_opstr(op, cls): """ Find the operation string, if any, to pass to numexpr for this operation. 
Parameters ---------- op : binary operator cls : class Returns ------- op_str : string or None """ # numexpr is available for non-sparse classes subtyp = getattr(cls, '_subtyp', '') use_numexpr = 'sparse' not in subtyp if not use_numexpr: # if we're not using numexpr, then don't pass a str_rep return None return {operator.add: '+', radd: '+', operator.mul: '*', rmul: '*', operator.sub: '-', rsub: '-', operator.truediv: '/', rtruediv: '/', operator.floordiv: '//', rfloordiv: '//', operator.mod: None, # TODO: Why None for mod but '%' for rmod? rmod: '%', operator.pow: '**', rpow: '**', operator.eq: '==', operator.ne: '!=', operator.le: '<=', operator.lt: '<', operator.ge: '>=', operator.gt: '>', operator.and_: '&', rand_: '&', operator.or_: '|', ror_: '|', operator.xor: '^', rxor: '^', divmod: None, rdivmod: None}[op] def _get_op_name(op, special): """ Find the name to attach to this method according to conventions for special and non-special methods. Parameters ---------- op : binary operator special : bool Returns ------- op_name : str """ opname = op.__name__.strip('_') if special: opname = '__{opname}__'.format(opname=opname) return opname # ----------------------------------------------------------------------------- # Docstring Generation and Templates _add_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.add(b, fill_value=0) a 2.0 b 1.0 c 1.0 d 1.0 e NaN dtype: float64 """ _sub_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.subtract(b, fill_value=0) a 0.0 b 1.0 c 1.0 d -1.0 e NaN dtype: float64 """ _mul_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.multiply(b, fill_value=0) a 1.0 b 0.0 c 0.0 d 0.0 e NaN dtype: float64 """ _div_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.divide(b, fill_value=0) a 1.0 b inf c inf d 0.0 e NaN dtype: float64 """ _floordiv_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.floordiv(b, fill_value=0) a 1.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _mod_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.mod(b, fill_value=0) a 0.0 b NaN c NaN d 0.0 e NaN dtype: float64 """ _pow_example_SERIES = """ Examples -------- >>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd']) >>> a a 1.0 b 1.0 c 1.0 d NaN dtype: float64 >>> b = pd.Series([1, np.nan, 
1, np.nan], index=['a', 'b', 'd', 'e']) >>> b a 1.0 b NaN d 1.0 e NaN dtype: float64 >>> a.pow(b, fill_value=0) a 1.0 b 1.0 c 1.0 d 0.0 e NaN dtype: float64 """ _op_descriptions = { # Arithmetic Operators 'add': {'op': '+', 'desc': 'Addition', 'reverse': 'radd', 'series_examples': _add_example_SERIES}, 'sub': {'op': '-', 'desc': 'Subtraction', 'reverse': 'rsub', 'series_examples': _sub_example_SERIES}, 'mul': {'op': '*', 'desc': 'Multiplication', 'reverse': 'rmul', 'series_examples': _mul_example_SERIES, 'df_examples': None}, 'mod': {'op': '%', 'desc': 'Modulo', 'reverse': 'rmod', 'series_examples': _mod_example_SERIES}, 'pow': {'op': '**', 'desc': 'Exponential power', 'reverse': 'rpow', 'series_examples': _pow_example_SERIES, 'df_examples': None}, 'truediv': {'op': '/', 'desc': 'Floating division', 'reverse': 'rtruediv', 'series_examples': _div_example_SERIES, 'df_examples': None}, 'floordiv': {'op': '//', 'desc': 'Integer division', 'reverse': 'rfloordiv', 'series_examples': _floordiv_example_SERIES, 'df_examples': None}, 'divmod': {'op': 'divmod', 'desc': 'Integer division and modulo', 'reverse': 'rdivmod', 'series_examples': None, 'df_examples': None}, # Comparison Operators 'eq': {'op': '==', 'desc': 'Equal to', 'reverse': None, 'series_examples': None}, 'ne': {'op': '!=', 'desc': 'Not equal to', 'reverse': None, 'series_examples': None}, 'lt': {'op': '<', 'desc': 'Less than', 'reverse': None, 'series_examples': None}, 'le': {'op': '<=', 'desc': 'Less than or equal to', 'reverse': None, 'series_examples': None}, 'gt': {'op': '>', 'desc': 'Greater than', 'reverse': None, 'series_examples': None}, 'ge': {'op': '>=', 'desc': 'Greater than or equal to', 'reverse': None, 'series_examples': None} } # type: Dict[str, Dict[str, Optional[str]]] _op_names = list(_op_descriptions.keys()) for key in _op_names: reverse_op = _op_descriptions[key]['reverse'] if reverse_op is not None: _op_descriptions[reverse_op] = _op_descriptions[key].copy() _op_descriptions[reverse_op]['reverse'] = key _flex_doc_SERIES = """ Return {desc} of series and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. Parameters ---------- other : Series or scalar value fill_value : None or float value, default None (NaN) Fill existing missing (NaN) values, and any new element needed for successful Series alignment, with this value before computation. If data in both corresponding Series locations is missing the result will be missing. level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level. Returns ------- Series The result of the operation. See Also -------- Series.{reverse} """ _arith_doc_FRAME = """ Binary operator %s with support to substitute a fill_value for missing data in one of the inputs Parameters ---------- other : Series, DataFrame, or constant axis : {0, 1, 'index', 'columns'} For Series input, axis to match Series index on fill_value : None or float value, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. 
If data in both corresponding DataFrame locations is missing the result will be missing level : int or name Broadcast across a level, matching Index values on the passed MultiIndex level Returns ------- result : DataFrame Notes ----- Mismatched indices will be unioned together """ _flex_doc_FRAME = """ Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`). Equivalent to ``{equiv}``, but with support to substitute a fill_value for missing data in one of the inputs. With reverse version, `{reverse}`. Among flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to arithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`. Parameters ---------- other : scalar, sequence, Series, or DataFrame Any single or multiple element data structure, or list-like object. axis : {{0 or 'index', 1 or 'columns'}} Whether to compare by the index (0 or 'index') or columns (1 or 'columns'). For Series input, axis to match Series index on. level : int or label Broadcast across a level, matching Index values on the passed MultiIndex level. fill_value : float or None, default None Fill existing missing (NaN) values, and any new element needed for successful DataFrame alignment, with this value before computation. If data in both corresponding DataFrame locations is missing the result will be missing. Returns ------- DataFrame Result of the arithmetic operation. See Also -------- DataFrame.add : Add DataFrames. DataFrame.sub : Subtract DataFrames. DataFrame.mul : Multiply DataFrames. DataFrame.div : Divide DataFrames (float division). DataFrame.truediv : Divide DataFrames (float division). DataFrame.floordiv : Divide DataFrames (integer division). DataFrame.mod : Calculate modulo (remainder after division). DataFrame.pow : Calculate exponential power. Notes ----- Mismatched indices will be unioned together. Examples -------- >>> df = pd.DataFrame({{'angles': [0, 3, 4], ... 'degrees': [360, 180, 360]}}, ... index=['circle', 'triangle', 'rectangle']) >>> df angles degrees circle 0 360 triangle 3 180 rectangle 4 360 Add a scalar with operator version which return the same results. >>> df + 1 angles degrees circle 1 361 triangle 4 181 rectangle 5 361 >>> df.add(1) angles degrees circle 1 361 triangle 4 181 rectangle 5 361 Divide by constant with reverse version. >>> df.div(10) angles degrees circle 0.0 36.0 triangle 0.3 18.0 rectangle 0.4 36.0 >>> df.rdiv(10) angles degrees circle inf 0.027778 triangle 3.333333 0.055556 rectangle 2.500000 0.027778 Subtract a list and Series by axis with operator version. >>> df - [1, 2] angles degrees circle -1 358 triangle 2 178 rectangle 3 358 >>> df.sub([1, 2], axis='columns') angles degrees circle -1 358 triangle 2 178 rectangle 3 358 >>> df.sub(pd.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']), ... axis='index') angles degrees circle -1 359 triangle 2 179 rectangle 3 359 Multiply a DataFrame of different shape with operator version. >>> other = pd.DataFrame({{'angles': [0, 3, 4]}}, ... index=['circle', 'triangle', 'rectangle']) >>> other angles circle 0 triangle 3 rectangle 4 >>> df * other angles degrees circle 0 NaN triangle 9 NaN rectangle 16 NaN >>> df.mul(other, fill_value=0) angles degrees circle 0 0.0 triangle 9 0.0 rectangle 16 0.0 Divide by a MultiIndex by level. >>> df_multindex = pd.DataFrame({{'angles': [0, 3, 4, 4, 5, 6], ... 'degrees': [360, 180, 360, 360, 540, 720]}}, ... index=[['A', 'A', 'A', 'B', 'B', 'B'], ... ['circle', 'triangle', 'rectangle', ... 
...                              'square', 'pentagon', 'hexagon']])
>>> df_multindex
             angles  degrees
A circle          0      360
  triangle        3      180
  rectangle       4      360
B square          4      360
  pentagon        5      540
  hexagon         6      720

>>> df.div(df_multindex, level=1, fill_value=0)
             angles  degrees
A circle        NaN      1.0
  triangle      1.0      1.0
  rectangle     1.0      1.0
B square        0.0      0.0
  pentagon      0.0      0.0
  hexagon       0.0      0.0
"""

_flex_comp_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).

Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison
operators.

Equivalent to `==`, `!=`, `<=`, `<`, `>=`, `>` with support to choose axis
(rows or columns) and level for comparison.

Parameters
----------
other : scalar, sequence, Series, or DataFrame
    Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}, default 'columns'
    Whether to compare by the index (0 or 'index') or columns
    (1 or 'columns').
level : int or label
    Broadcast across a level, matching Index values on the passed
    MultiIndex level.

Returns
-------
DataFrame of bool
    Result of the comparison.

See Also
--------
DataFrame.eq : Compare DataFrames for equality elementwise.
DataFrame.ne : Compare DataFrames for inequality elementwise.
DataFrame.le : Compare DataFrames for less than inequality
    or equality elementwise.
DataFrame.lt : Compare DataFrames for strictly less than
    inequality elementwise.
DataFrame.ge : Compare DataFrames for greater than inequality
    or equality elementwise.
DataFrame.gt : Compare DataFrames for strictly greater than
    inequality elementwise.

Notes
-----
Mismatched indices will be unioned together.
`NaN` values are considered different (i.e. `NaN` != `NaN`).

Examples
--------
>>> df = pd.DataFrame({{'cost': [250, 150, 100],
...                    'revenue': [100, 250, 300]}},
...                   index=['A', 'B', 'C'])
>>> df
   cost  revenue
A   250      100
B   150      250
C   100      300

Comparison with a scalar, using either the operator or method:

>>> df == 100
    cost  revenue
A  False     True
B  False    False
C   True    False

>>> df.eq(100)
    cost  revenue
A  False     True
B  False    False
C   True    False

When `other` is a :class:`Series`, the columns of a DataFrame are aligned
with the index of `other` and broadcast:

>>> df != pd.Series([100, 250], index=["cost", "revenue"])
    cost  revenue
A   True     True
B   True    False
C  False     True

Use the method to control the broadcast axis:

>>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis='index')
   cost  revenue
A  True    False
B  True     True
C  True     True
D  True     True

When comparing to an arbitrary sequence, the number of columns must
match the number of elements in `other`:

>>> df == [250, 100]
    cost  revenue
A   True     True
B  False    False
C  False    False

Use the method to control the axis:

>>> df.eq([250, 250, 100], axis='index')
    cost  revenue
A   True    False
B  False     True
C   True    False

Compare to a DataFrame of different shape.

>>> other = pd.DataFrame({{'revenue': [300, 250, 100, 150]}},
...                      index=['A', 'B', 'C', 'D'])
>>> other
   revenue
A      300
B      250
C      100
D      150

>>> df.gt(other)
    cost  revenue
A  False    False
B  False    False
C  False     True
D  False    False

Compare to a MultiIndex by level.

>>> df_multindex = pd.DataFrame({{'cost': [250, 150, 100, 150, 300, 220],
...                              'revenue': [100, 250, 300, 200, 175, 225]}},
...                             index=[['Q1', 'Q1', 'Q1', 'Q2', 'Q2', 'Q2'],
...
['A', 'B', 'C', 'A', 'B', 'C']]) >>> df_multindex cost revenue Q1 A 250 100 B 150 250 C 100 300 Q2 A 150 200 B 300 175 C 220 225 >>> df.le(df_multindex, level=1) cost revenue Q1 A True True B True True C True True Q2 A False True B True False C True False """ def _make_flex_doc(op_name, typ): """ Make the appropriate substitutions for the given operation and class-typ into either _flex_doc_SERIES or _flex_doc_FRAME to return the docstring to attach to a generated method. Parameters ---------- op_name : str {'__add__', '__sub__', ... '__eq__', '__ne__', ...} typ : str {series, 'dataframe']} Returns ------- doc : str """ op_name = op_name.replace('__', '') op_desc = _op_descriptions[op_name] if op_name.startswith('r'): equiv = 'other ' + op_desc['op'] + ' ' + typ else: equiv = typ + ' ' + op_desc['op'] + ' other' if typ == 'series': base_doc = _flex_doc_SERIES doc_no_examples = base_doc.format( desc=op_desc['desc'], op_name=op_name, equiv=equiv, reverse=op_desc['reverse'] ) if op_desc['series_examples']: doc = doc_no_examples + op_desc['series_examples'] else: doc = doc_no_examples elif typ == 'dataframe': base_doc = _flex_doc_FRAME doc = base_doc.format( desc=op_desc['desc'], op_name=op_name, equiv=equiv, reverse=op_desc['reverse'] ) else: raise AssertionError('Invalid typ argument.') return doc # ----------------------------------------------------------------------------- # Masking NA values and fallbacks for operations numpy does not support def fill_binop(left, right, fill_value): """ If a non-None fill_value is given, replace null entries in left and right with this value, but only in positions where _one_ of left/right is null, not both. Parameters ---------- left : array-like right : array-like fill_value : object Returns ------- left : array-like right : array-like Notes ----- Makes copies if fill_value is not None """ # TODO: can we make a no-copy implementation? if fill_value is not None: left_mask = isna(left) right_mask = isna(right) left = left.copy() right = right.copy() # one but not both mask = left_mask ^ right_mask left[left_mask & mask] = fill_value right[right_mask & mask] = fill_value return left, right def mask_cmp_op(x, y, op): """ Apply the function `op` to only non-null points in x and y. Parameters ---------- x : array-like y : array-like op : binary operation Returns ------- result : ndarray[bool] """ xrav = x.ravel() result = np.empty(x.size, dtype=bool) if isinstance(y, (np.ndarray, ABCSeries)): yrav = y.ravel() mask = notna(xrav) & notna(yrav) result[mask] = op(np.array(list(xrav[mask])), np.array(list(yrav[mask]))) else: mask = notna(xrav) result[mask] = op(np.array(list(xrav[mask])), y) if op == operator.ne: # pragma: no cover np.putmask(result, ~mask, True) else: np.putmask(result, ~mask, False) result = result.reshape(x.shape) return result def masked_arith_op(x, y, op): """ If the given arithmetic operation fails, attempt it again on only the non-null elements of the input array(s). Parameters ---------- x : np.ndarray y : np.ndarray, Series, Index op : binary operator """ # For Series `x` is 1D so ravel() is a no-op; calling it anyway makes # the logic valid for both Series and DataFrame ops. xrav = x.ravel() assert isinstance(x, (np.ndarray, ABCSeries)), type(x) if isinstance(y, (np.ndarray, ABCSeries, ABCIndexClass)): dtype = find_common_type([x.dtype, y.dtype]) result = np.empty(x.size, dtype=dtype) # PeriodIndex.ravel() returns int64 dtype, so we have # to work around that case. 
See GH#19956 yrav = y if is_period_dtype(y) else y.ravel() mask = notna(xrav) & notna(yrav) if yrav.shape != mask.shape: # FIXME: GH#5284, GH#5035, GH#19448 # Without specifically raising here we get mismatched # errors in Py3 (TypeError) vs Py2 (ValueError) # Note: Only = an issue in DataFrame case raise ValueError('Cannot broadcast operands together.') if mask.any(): with np.errstate(all='ignore'): result[mask] = op(xrav[mask], com.values_from_object(yrav[mask])) else: assert is_scalar(y), type(y) assert isinstance(x, np.ndarray), type(x) # mask is only meaningful for x result = np.empty(x.size, dtype=x.dtype) mask = notna(xrav) # 1 ** np.nan is 1. So we have to unmask those. if op == pow: mask = np.where(x == 1, False, mask) elif op == rpow: mask = np.where(y == 1, False, mask) if mask.any(): with np.errstate(all='ignore'): result[mask] = op(xrav[mask], y) result, changed = maybe_upcast_putmask(result, ~mask, np.nan) result = result.reshape(x.shape) # 2D compat return result def invalid_comparison(left, right, op): """ If a comparison has mismatched types and is not necessarily meaningful, follow python3 conventions by: - returning all-False for equality - returning all-True for inequality - raising TypeError otherwise Parameters ---------- left : array-like right : scalar, array-like op : operator.{eq, ne, lt, le, gt} Raises ------ TypeError : on inequality comparisons """ if op is operator.eq: res_values = np.zeros(left.shape, dtype=bool) elif op is operator.ne: res_values = np.ones(left.shape, dtype=bool) else: raise TypeError("Invalid comparison between dtype={dtype} and {typ}" .format(dtype=left.dtype, typ=type(right).__name__)) return res_values # ----------------------------------------------------------------------------- # Dispatch logic def should_series_dispatch(left, right, op): """ Identify cases where a DataFrame operation should dispatch to its Series counterpart. Parameters ---------- left : DataFrame right : DataFrame op : binary operator Returns ------- override : bool """ if left._is_mixed_type or right._is_mixed_type: return True if not len(left.columns) or not len(right.columns): # ensure obj.dtypes[0] exists for each obj return False ldtype = left.dtypes.iloc[0] rdtype = right.dtypes.iloc[0] if ((is_timedelta64_dtype(ldtype) and is_integer_dtype(rdtype)) or (is_timedelta64_dtype(rdtype) and is_integer_dtype(ldtype))): # numpy integer dtypes as timedelta64 dtypes in this scenario return True if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype): # in particular case where right is an array of DateOffsets return True return False def dispatch_to_series(left, right, func, str_rep=None, axis=None): """ Evaluate the frame operation func(left, right) by evaluating column-by-column, dispatching to the Series implementation. Parameters ---------- left : DataFrame right : scalar or DataFrame func : arithmetic or comparison operator str_rep : str or None, default None axis : {None, 0, 1, "index", "columns"} Returns ------- DataFrame """ # Note: we use iloc to access columns for compat with cases # with non-unique columns. 
import pandas.core.computation.expressions as expressions right = lib.item_from_zerodim(right) if lib.is_scalar(right) or np.ndim(right) == 0: def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} elif isinstance(right, ABCDataFrame): assert right._indexed_same(left) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[:, i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries) and axis == "columns": # We only get here if called via left._combine_match_columns, # in which case we specifically want to operate row-by-row assert right.index.equals(left.columns) def column_op(a, b): return {i: func(a.iloc[:, i], b.iloc[i]) for i in range(len(a.columns))} elif isinstance(right, ABCSeries): assert right.index.equals(left.index) # Handle other cases later def column_op(a, b): return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))} else: # Remaining cases have less-obvious dispatch rules raise NotImplementedError(right) new_data = expressions.evaluate(column_op, str_rep, left, right) result = left._constructor(new_data, index=left.index, copy=False) # Pin columns instead of passing to constructor for compat with # non-unique columns case result.columns = left.columns return result def dispatch_to_index_op(op, left, right, index_class): """ Wrap Series left in the given index_class to delegate the operation op to the index implementation. DatetimeIndex and TimedeltaIndex perform type checking, timezone handling, overflow checks, etc. Parameters ---------- op : binary operator (operator.add, operator.sub, ...) left : Series right : object index_class : DatetimeIndex or TimedeltaIndex Returns ------- result : object, usually DatetimeIndex, TimedeltaIndex, or Series """ left_idx = index_class(left) # avoid accidentally allowing integer add/sub. For datetime64[tz] dtypes, # left_idx may inherit a freq from a cached DatetimeIndex. # See discussion in GH#19147. if getattr(left_idx, 'freq', None) is not None: left_idx = left_idx._shallow_copy(freq=None) try: result = op(left_idx, right) except NullFrequencyError: # DatetimeIndex and TimedeltaIndex with freq == None raise ValueError # on add/sub of integers (or int-like). We re-raise as a TypeError. raise TypeError('incompatible type for a datetime/timedelta ' 'operation [{name}]'.format(name=op.__name__)) return result def dispatch_to_extension_op(op, left, right): """ Assume that left or right is a Series backed by an ExtensionArray, apply the operator defined by op. """ # The op calls will raise TypeError if the op is not defined # on the ExtensionArray # unbox Series and Index to arrays if isinstance(left, (ABCSeries, ABCIndexClass)): new_left = left._values else: new_left = left if isinstance(right, (ABCSeries, ABCIndexClass)): new_right = right._values else: new_right = right res_values = op(new_left, new_right) res_name = get_op_result_name(left, right) if op.__name__ in ['divmod', 'rdivmod']: return _construct_divmod_result( left, res_values, left.index, res_name) return _construct_result(left, res_values, left.index, res_name) # ----------------------------------------------------------------------------- # Functions that add arithmetic methods to objects, given arithmetic factory # methods def _get_method_wrappers(cls): """ Find the appropriate operation-wrappers to use when defining flex/special arithmetic, boolean, and comparison operations with the given class. 
Parameters ---------- cls : class Returns ------- arith_flex : function or None comp_flex : function or None arith_special : function comp_special : function bool_special : function Notes ----- None is only returned for SparseArray """ if issubclass(cls, ABCSparseSeries): # Be sure to catch this before ABCSeries and ABCSparseArray, # as they will both come see SparseSeries as a subclass arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SPARSE_SERIES comp_special = _arith_method_SPARSE_SERIES bool_special = _bool_method_SERIES # TODO: I don't think the functions defined by bool_method are tested elif issubclass(cls, ABCSeries): # Just Series; SparseSeries is caught above arith_flex = _flex_method_SERIES comp_flex = _flex_method_SERIES arith_special = _arith_method_SERIES comp_special = _comp_method_SERIES bool_special = _bool_method_SERIES elif issubclass(cls, ABCSparseArray): arith_flex = None comp_flex = None arith_special = _arith_method_SPARSE_ARRAY comp_special = _arith_method_SPARSE_ARRAY bool_special = _arith_method_SPARSE_ARRAY elif issubclass(cls, ABCDataFrame): # Same for DataFrame and SparseDataFrame arith_flex = _arith_method_FRAME comp_flex = _flex_comp_method_FRAME arith_special = _arith_method_FRAME comp_special = _comp_method_FRAME bool_special = _arith_method_FRAME return arith_flex, comp_flex, arith_special, comp_special, bool_special def _create_methods(cls, arith_method, comp_method, bool_method, special): # creates actual methods based upon arithmetic, comp and bool method # constructors. have_divmod = issubclass(cls, ABCSeries) # divmod is available for Series and SparseSeries # yapf: disable new_methods = dict( add=arith_method(cls, operator.add, special), radd=arith_method(cls, radd, special), sub=arith_method(cls, operator.sub, special), mul=arith_method(cls, operator.mul, special), truediv=arith_method(cls, operator.truediv, special), floordiv=arith_method(cls, operator.floordiv, special), # Causes a floating point exception in the tests when numexpr enabled, # so for now no speedup mod=arith_method(cls, operator.mod, special), pow=arith_method(cls, operator.pow, special), # not entirely sure why this is necessary, but previously was included # so it's here to maintain compatibility rmul=arith_method(cls, rmul, special), rsub=arith_method(cls, rsub, special), rtruediv=arith_method(cls, rtruediv, special), rfloordiv=arith_method(cls, rfloordiv, special), rpow=arith_method(cls, rpow, special), rmod=arith_method(cls, rmod, special)) # yapf: enable new_methods['div'] = new_methods['truediv'] new_methods['rdiv'] = new_methods['rtruediv'] if have_divmod: # divmod doesn't have an op that is supported by numexpr new_methods['divmod'] = arith_method(cls, divmod, special) new_methods['rdivmod'] = arith_method(cls, rdivmod, special) new_methods.update(dict( eq=comp_method(cls, operator.eq, special), ne=comp_method(cls, operator.ne, special), lt=comp_method(cls, operator.lt, special), gt=comp_method(cls, operator.gt, special), le=comp_method(cls, operator.le, special), ge=comp_method(cls, operator.ge, special))) if bool_method: new_methods.update( dict(and_=bool_method(cls, operator.and_, special), or_=bool_method(cls, operator.or_, special), # For some reason ``^`` wasn't used in original. 
xor=bool_method(cls, operator.xor, special), rand_=bool_method(cls, rand_, special), ror_=bool_method(cls, ror_, special), rxor=bool_method(cls, rxor, special))) if special: dunderize = lambda x: '__{name}__'.format(name=x.strip('_')) else: dunderize = lambda x: x new_methods = {dunderize(k): v for k, v in new_methods.items()} return new_methods def add_methods(cls, new_methods): for name, method in new_methods.items(): # For most methods, if we find that the class already has a method # of the same name, it is OK to over-write it. The exception is # inplace methods (__iadd__, __isub__, ...) for SparseArray, which # retain the np.ndarray versions. force = not (issubclass(cls, ABCSparseArray) and name.startswith('__i')) if force or name not in cls.__dict__: setattr(cls, name, method) # ---------------------------------------------------------------------- # Arithmetic def add_special_arithmetic_methods(cls): """ Adds the full suite of special arithmetic methods (``__add__``, ``__sub__``, etc.) to the class. Parameters ---------- cls : class special methods will be defined and pinned to this class """ _, _, arith_method, comp_method, bool_method = _get_method_wrappers(cls) new_methods = _create_methods(cls, arith_method, comp_method, bool_method, special=True) # inplace operators (I feel like these should get passed an `inplace=True` # or just be removed def _wrap_inplace_method(method): """ return an inplace wrapper for this method """ def f(self, other): result = method(self, other) # this makes sure that we are aligned like the input # we are updating inplace so we want to ignore is_copy self._update_inplace(result.reindex_like(self, copy=False)._data, verify_is_copy=False) return self f.__name__ = "__i{name}__".format(name=method.__name__.strip("__")) return f new_methods.update( dict(__iadd__=_wrap_inplace_method(new_methods["__add__"]), __isub__=_wrap_inplace_method(new_methods["__sub__"]), __imul__=_wrap_inplace_method(new_methods["__mul__"]), __itruediv__=_wrap_inplace_method(new_methods["__truediv__"]), __ifloordiv__=_wrap_inplace_method(new_methods["__floordiv__"]), __imod__=_wrap_inplace_method(new_methods["__mod__"]), __ipow__=_wrap_inplace_method(new_methods["__pow__"]))) new_methods.update( dict(__iand__=_wrap_inplace_method(new_methods["__and__"]), __ior__=_wrap_inplace_method(new_methods["__or__"]), __ixor__=_wrap_inplace_method(new_methods["__xor__"]))) add_methods(cls, new_methods=new_methods) def add_flex_arithmetic_methods(cls): """ Adds the full suite of flex arithmetic methods (``pow``, ``mul``, ``add``) to the class. 
Parameters ---------- cls : class flex methods will be defined and pinned to this class """ flex_arith_method, flex_comp_method, _, _, _ = _get_method_wrappers(cls) new_methods = _create_methods(cls, flex_arith_method, flex_comp_method, bool_method=None, special=False) new_methods.update(dict(multiply=new_methods['mul'], subtract=new_methods['sub'], divide=new_methods['div'])) # opt out of bool flex methods for now assert not any(kname in new_methods for kname in ('ror_', 'rxor', 'rand_')) add_methods(cls, new_methods=new_methods) # ----------------------------------------------------------------------------- # Series def _align_method_SERIES(left, right, align_asobject=False): """ align lhs and rhs Series """ # ToDo: Different from _align_method_FRAME, list, tuple and ndarray # are not coerced here # because Series has inconsistencies described in #13637 if isinstance(right, ABCSeries): # avoid repeated alignment if not left.index.equals(right.index): if align_asobject: # to keep original value's dtype for bool ops left = left.astype(object) right = right.astype(object) left, right = left.align(right, copy=False) return left, right def _construct_result(left, result, index, name, dtype=None): """ If the raw op result has a non-None name (e.g. it is an Index object) and the name argument is None, then passing name to the constructor will not be enough; we still need to override the name attribute. """ out = left._constructor(result, index=index, dtype=dtype) out = out.__finalize__(left) out.name = name return out def _construct_divmod_result(left, result, index, name, dtype=None): """divmod returns a tuple of like indexed series instead of a single series. """ return ( _construct_result(left, result[0], index=index, name=name, dtype=dtype), _construct_result(left, result[1], index=index, name=name, dtype=dtype), ) def _arith_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) eval_kwargs = _gen_eval_kwargs(op_name) fill_zeros = _gen_fill_zeros(op_name) construct_result = (_construct_divmod_result if op in [divmod, rdivmod] else _construct_result) def na_op(x, y): """ Return the result of evaluating op on the passed in values. If native types are not compatible, try coersion to object dtype. Parameters ---------- x : array-like y : array-like or scalar Returns ------- array-like Raises ------ TypeError : invalid operation """ import pandas.core.computation.expressions as expressions try: result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) except TypeError: result = masked_arith_op(x, y, op) except Exception: # TODO: more specific? if is_object_dtype(x): return libalgos.arrmap_object(x, lambda val: op(val, y)) raise result = missing.fill_zeros(result, x, y, op_name, fill_zeros) return result def wrapper(left, right): if isinstance(right, ABCDataFrame): return NotImplemented left, right = _align_method_SERIES(left, right) res_name = get_op_result_name(left, right) right = maybe_upcast_for_op(right) if is_categorical_dtype(left): raise TypeError("{typ} cannot perform the operation " "{op}".format(typ=type(left).__name__, op=str_rep)) elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left): # Give dispatch_to_index_op a chance for tests like # test_dt64_series_add_intlike, which the index dispatching handles # specifically. 
result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex) return construct_result(left, result, index=left.index, name=res_name, dtype=result.dtype) elif (is_extension_array_dtype(left) or (is_extension_array_dtype(right) and not is_scalar(right))): # GH#22378 disallow scalar to exclude e.g. "category", "Int64" return dispatch_to_extension_op(op, left, right) elif is_timedelta64_dtype(left): result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex) return construct_result(left, result, index=left.index, name=res_name) elif is_timedelta64_dtype(right): # We should only get here with non-scalar or timedelta64('NaT') # values for right # Note: we cannot use dispatch_to_index_op because # that may incorrectly raise TypeError when we # should get NullFrequencyError result = op(pd.Index(left), right) return construct_result(left, result, index=left.index, name=res_name, dtype=result.dtype) lvalues = left.values rvalues = right if isinstance(rvalues, ABCSeries): rvalues = rvalues.values with np.errstate(all='ignore'): result = na_op(lvalues, rvalues) return construct_result(left, result, index=left.index, name=res_name, dtype=None) wrapper.__name__ = op_name return wrapper def _comp_method_OBJECT_ARRAY(op, x, y): if isinstance(y, list): y = construct_1d_object_array_from_listlike(y) if isinstance(y, (np.ndarray, ABCSeries, ABCIndex)): if not is_object_dtype(y.dtype): y = y.astype(np.object_) if isinstance(y, (ABCSeries, ABCIndex)): y = y.values result = libops.vec_compare(x, y, op) else: result = libops.scalar_compare(x, y, op) return result def _comp_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) masker = _gen_eval_kwargs(op_name).get('masker', False) def na_op(x, y): # TODO: # should have guarantess on what x, y can be type-wise # Extension Dtypes are not called here # Checking that cases that were once handled here are no longer # reachable. assert not (is_categorical_dtype(y) and not is_scalar(y)) if is_object_dtype(x.dtype): result = _comp_method_OBJECT_ARRAY(op, x, y) elif is_datetimelike_v_numeric(x, y): return invalid_comparison(x, y, op) else: # we want to compare like types # we only want to convert to integer like if # we are not NotImplemented, otherwise # we would allow datetime64 (but viewed as i8) against # integer comparisons # we have a datetime/timedelta and may need to convert assert not needs_i8_conversion(x) mask = None if not is_scalar(y) and needs_i8_conversion(y): mask = isna(x) | isna(y) y = y.view('i8') x = x.view('i8') method = getattr(x, op_name, None) if method is not None: with np.errstate(all='ignore'): result = method(y) if result is NotImplemented: return invalid_comparison(x, y, op) else: result = op(x, y) if mask is not None and mask.any(): result[mask] = masker return result def wrapper(self, other, axis=None): # Validate the axis parameter if axis is not None: self._get_axis_number(axis) res_name = get_op_result_name(self, other) if isinstance(other, list): # TODO: same for tuples? 
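# coercing the plain list to ndarray routes the comparison through the array branch below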
other = np.asarray(other) if isinstance(other, ABCDataFrame): # pragma: no cover # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, ABCSeries) and not self._indexed_same(other): raise ValueError("Can only compare identically-labeled " "Series objects") elif is_categorical_dtype(self): # Dispatch to Categorical implementation; pd.CategoricalIndex # behavior is non-canonical GH#19513 res_values = dispatch_to_index_op(op, self, other, pd.Categorical) return self._constructor(res_values, index=self.index, name=res_name) elif is_datetime64_dtype(self) or is_datetime64tz_dtype(self): # Dispatch to DatetimeIndex to ensure identical # Series/Index behavior if (isinstance(other, datetime.date) and not isinstance(other, datetime.datetime)): # https://github.com/pandas-dev/pandas/issues/21152 # Compatibility for difference between Series comparison w/ # datetime and date msg = ( "Comparing Series of datetimes with 'datetime.date'. " "Currently, the 'datetime.date' is coerced to a " "datetime. In the future pandas will not coerce, " "and {future}. " "To retain the current behavior, " "convert the 'datetime.date' to a datetime with " "'pd.Timestamp'." ) if op in {operator.lt, operator.le, operator.gt, operator.ge}: future = "a TypeError will be raised" else: future = ( "'the values will not compare equal to the " "'datetime.date'" ) msg = '\n'.join(textwrap.wrap(msg.format(future=future))) warnings.warn(msg, FutureWarning, stacklevel=2) other = pd.Timestamp(other) res_values = dispatch_to_index_op(op, self, other, pd.DatetimeIndex) return self._constructor(res_values, index=self.index, name=res_name) elif is_timedelta64_dtype(self): res_values = dispatch_to_index_op(op, self, other, pd.TimedeltaIndex) return self._constructor(res_values, index=self.index, name=res_name) elif (is_extension_array_dtype(self) or (is_extension_array_dtype(other) and not is_scalar(other))): # Note: the `not is_scalar(other)` condition rules out # e.g. other == "category" return dispatch_to_extension_op(op, self, other) elif isinstance(other, ABCSeries): # By this point we have checked that self._indexed_same(other) res_values = na_op(self.values, other.values) # rename is needed in case res_name is None and res_values.name # is not. return self._constructor(res_values, index=self.index, name=res_name).rename(res_name) elif isinstance(other, (np.ndarray, pd.Index)): # do not check length of zerodim array # as it will broadcast if other.ndim != 0 and len(self) != len(other): raise ValueError('Lengths must match to compare') res_values = na_op(self.values, np.asarray(other)) result = self._constructor(res_values, index=self.index) # rename is needed in case res_name is None and self.name # is not. 
return result.__finalize__(self).rename(res_name) elif is_scalar(other) and isna(other): # numpy does not like comparisons vs None if op is operator.ne: res_values = np.ones(len(self), dtype=bool) else: res_values = np.zeros(len(self), dtype=bool) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') else: values = self.get_values() with np.errstate(all='ignore'): res = na_op(values, other) if is_scalar(res): raise TypeError('Could not compare {typ} type with Series' .format(typ=type(other))) # always return a full value series here res_values = com.values_from_object(res) return self._constructor(res_values, index=self.index, name=res_name, dtype='bool') wrapper.__name__ = op_name return wrapper def _bool_method_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def na_op(x, y): try: result = op(x, y) except TypeError: assert not isinstance(y, (list, ABCSeries, ABCIndexClass)) if isinstance(y, np.ndarray): # bool-bool dtype operations should be OK, should not get here assert not (is_bool_dtype(x) and is_bool_dtype(y)) x = ensure_object(x) y = ensure_object(y) result = libops.vec_binop(x, y, op) else: # let null fall thru assert lib.is_scalar(y) if not isna(y): y = bool(y) try: result = libops.scalar_binop(x, y, op) except (TypeError, ValueError, AttributeError, OverflowError, NotImplementedError): raise TypeError("cannot compare a dtyped [{dtype}] array " "with a scalar of type [{typ}]" .format(dtype=x.dtype, typ=type(y).__name__)) return result fill_int = lambda x: x.fillna(0) fill_bool = lambda x: x.fillna(False).astype(bool) def wrapper(self, other): is_self_int_dtype = is_integer_dtype(self.dtype) self, other = _align_method_SERIES(self, other, align_asobject=True) res_name = get_op_result_name(self, other) if isinstance(other, ABCDataFrame): # Defer to DataFrame implementation; fail early return NotImplemented elif isinstance(other, (ABCSeries, ABCIndexClass)): is_other_int_dtype = is_integer_dtype(other.dtype) other = fill_int(other) if is_other_int_dtype else fill_bool(other) ovalues = other.values finalizer = lambda x: x else: # scalars, list, tuple, np.array is_other_int_dtype = is_integer_dtype(np.asarray(other)) if is_list_like(other) and not isinstance(other, np.ndarray): # TODO: Can we do this before the is_integer_dtype check? # could the is_integer_dtype check be checking the wrong # thing? e.g. other = [[0, 1], [2, 3], [4, 5]]? other = construct_1d_object_array_from_listlike(other) ovalues = other finalizer = lambda x: x.__finalize__(self) # For int vs int `^`, `|`, `&` are bitwise operators and return # integer dtypes. 
Otherwise these are boolean ops filler = (fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool) res_values = na_op(self.values, ovalues) unfilled = self._constructor(res_values, index=self.index, name=res_name) filled = filler(unfilled) return finalizer(filled) wrapper.__name__ = op_name return wrapper def _flex_method_SERIES(cls, op, special): name = _get_op_name(op, special) doc = _make_flex_doc(name, 'series') @Appender(doc) def flex_wrapper(self, other, level=None, fill_value=None, axis=0): # validate axis if axis is not None: self._get_axis_number(axis) if isinstance(other, ABCSeries): return self._binop(other, op, level=level, fill_value=fill_value) elif isinstance(other, (np.ndarray, list, tuple)): if len(other) != len(self): raise ValueError('Lengths must be equal') other = self._constructor(other, self.index) return self._binop(other, op, level=level, fill_value=fill_value) else: if fill_value is not None: self = self.fillna(fill_value) return self._constructor(op(self, other), self.index).__finalize__(self) flex_wrapper.__name__ = name return flex_wrapper # ----------------------------------------------------------------------------- # DataFrame def _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None): """ Apply binary operator `func` to self, other using alignment and fill conventions determined by the fill_value, axis, and level kwargs. Parameters ---------- self : DataFrame other : Series func : binary operator fill_value : object, default None axis : {0, 1, 'columns', 'index', None}, default None level : int or None, default None Returns ------- result : DataFrame """ if fill_value is not None: raise NotImplementedError("fill_value {fill} not supported." .format(fill=fill_value)) if axis is not None: axis = self._get_axis_number(axis) if axis == 0: return self._combine_match_index(other, func, level=level) else: return self._combine_match_columns(other, func, level=level) else: if not len(other): return self * np.nan if not len(self): # Ambiguous case, use _series so works with DataFrame return self._constructor(data=self._series, index=self.index, columns=self.columns) # default axis is columns return self._combine_match_columns(other, func, level=level) def _align_method_FRAME(left, right, axis): """ convert rhs to meet lhs dims if input is list, tuple or np.ndarray """ def to_series(right): msg = ('Unable to coerce to Series, length must be {req_len}: ' 'given {given_len}') if axis is not None and left._get_axis_name(axis) == 'index': if len(left.index) != len(right): raise ValueError(msg.format(req_len=len(left.index), given_len=len(right))) right = left._constructor_sliced(right, index=left.index) else: if len(left.columns) != len(right): raise ValueError(msg.format(req_len=len(left.columns), given_len=len(right))) right = left._constructor_sliced(right, index=left.columns) return right if isinstance(right, np.ndarray): if right.ndim == 1: right = to_series(right) elif right.ndim == 2: if right.shape == left.shape: right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[0] == left.shape[0] and right.shape[1] == 1: # Broadcast across columns right = np.broadcast_to(right, left.shape) right = left._constructor(right, index=left.index, columns=left.columns) elif right.shape[1] == left.shape[1] and right.shape[0] == 1: # Broadcast along rows right = to_series(right[0, :]) else: raise ValueError("Unable to coerce to DataFrame, shape " "must be {req_shape}: given {given_shape}" 
.format(req_shape=left.shape, given_shape=right.shape)) elif right.ndim > 2: raise ValueError('Unable to coerce to Series/DataFrame, dim ' 'must be <= 2: {dim}'.format(dim=right.shape)) elif (is_list_like(right) and not isinstance(right, (ABCSeries, ABCDataFrame))): # GH17901 right = to_series(right) return right def _arith_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) eval_kwargs = _gen_eval_kwargs(op_name) fill_zeros = _gen_fill_zeros(op_name) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): import pandas.core.computation.expressions as expressions try: result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs) except TypeError: result = masked_arith_op(x, y, op) result = missing.fill_zeros(result, x, y, op_name, fill_zeros) return result if op_name in _op_descriptions: # i.e. include "add" but not "__add__" doc = _make_flex_doc(op_name, 'dataframe') else: doc = _arith_doc_FRAME % op_name @Appender(doc) def f(self, other, axis=default_axis, level=None, fill_value=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame pass_op = op if should_series_dispatch(self, other, op) else na_op return self._combine_frame(other, pass_op, fill_value, level) elif isinstance(other, ABCSeries): # For these values of `axis`, we end up dispatching to Series op, # so do not want the masked op. pass_op = op if axis in [0, "columns", None] else na_op return _combine_series_frame(self, other, pass_op, fill_value=fill_value, axis=axis, level=level) else: if fill_value is not None: self = self.fillna(fill_value) assert np.ndim(other) == 0 return self._combine_const(other, op) f.__name__ = op_name return f def _flex_comp_method_FRAME(cls, op, special): str_rep = _get_opstr(op, cls) op_name = _get_op_name(op, special) default_axis = _get_frame_op_default_axis(op_name) def na_op(x, y): try: with np.errstate(invalid='ignore'): result = op(x, y) except TypeError: result = mask_cmp_op(x, y, op) return result doc = _flex_comp_doc_FRAME.format(op_name=op_name, desc=_op_descriptions[op_name]['desc']) @Appender(doc) def f(self, other, axis=default_axis, level=None): other = _align_method_FRAME(self, other, axis) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): self, other = self.align(other, 'outer', level=level, copy=False) return dispatch_to_series(self, other, na_op, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, na_op, fill_value=None, axis=axis, level=level) else: assert np.ndim(other) == 0, other return self._combine_const(other, na_op) f.__name__ = op_name return f def _comp_method_FRAME(cls, func, special): str_rep = _get_opstr(func, cls) op_name = _get_op_name(func, special) @Appender('Wrapper for comparison method {name}'.format(name=op_name)) def f(self, other): other = _align_method_FRAME(self, other, axis=None) if isinstance(other, ABCDataFrame): # Another DataFrame if not self._indexed_same(other): raise ValueError('Can only compare identically-labeled ' 'DataFrame objects') return dispatch_to_series(self, other, func, str_rep) elif isinstance(other, ABCSeries): return _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None) else: # straight boolean comparisons we want to allow all columns # (regardless of dtype to pass thru) See #4537 for discussion. 
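# _combine_const broadcasts the comparison across all columns; missing results are filled with True before the cast to bool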
res = self._combine_const(other, func) return res.fillna(True).astype(bool) f.__name__ = op_name return f # ----------------------------------------------------------------------------- # Sparse def _cast_sparse_series_op(left, right, opname): """ For SparseSeries operation, coerce to float64 if the result is expected to have NaN or inf values Parameters ---------- left : SparseArray right : SparseArray opname : str Returns ------- left : SparseArray right : SparseArray """ from pandas.core.sparse.api import SparseDtype opname = opname.strip('_') # TODO: This should be moved to the array? if is_integer_dtype(left) and is_integer_dtype(right): # series coerces to float64 if result should have NaN/inf if opname in ('floordiv', 'mod') and (right.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) elif opname in ('rfloordiv', 'rmod') and (left.to_dense() == 0).any(): left = left.astype(SparseDtype(np.float64, left.fill_value)) right = right.astype(SparseDtype(np.float64, right.fill_value)) return left, right def _arith_method_SPARSE_SERIES(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): if isinstance(other, ABCDataFrame): return NotImplemented elif isinstance(other, ABCSeries): if not isinstance(other, ABCSparseSeries): other = other.to_sparse(fill_value=self.fill_value) return _sparse_series_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): new_values = op(self.values, other) return self._constructor(new_values, index=self.index, name=self.name) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper def _sparse_series_op(left, right, op, name): left, right = left.align(right, join='outer', copy=False) new_index = left.index new_name = get_op_result_name(left, right) from pandas.core.arrays.sparse import _sparse_array_op lvalues, rvalues = _cast_sparse_series_op(left.values, right.values, name) result = _sparse_array_op(lvalues, rvalues, op, name) return left._constructor(result, index=new_index, name=new_name) def _arith_method_SPARSE_ARRAY(cls, op, special): """ Wrapper function for Series arithmetic operations, to avoid code duplication. """ op_name = _get_op_name(op, special) def wrapper(self, other): from pandas.core.arrays.sparse.array import ( SparseArray, _sparse_array_op, _wrap_result, _get_fill) if isinstance(other, np.ndarray): if len(self) != len(other): raise AssertionError("length mismatch: {self} vs. {other}" .format(self=len(self), other=len(other))) if not isinstance(other, SparseArray): dtype = getattr(other, 'dtype', None) other = SparseArray(other, fill_value=self.fill_value, dtype=dtype) return _sparse_array_op(self, other, op, op_name) elif is_scalar(other): with np.errstate(all='ignore'): fill = op(_get_fill(self), np.asarray(other)) result = op(self.sp_values, other) return _wrap_result(op_name, result, self.sp_index, fill) else: # pragma: no cover raise TypeError('operation with {other} not supported' .format(other=type(other))) wrapper.__name__ = op_name return wrapper
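The _create_methods/add_methods machinery above builds every operator from a small factory and pins the results onto the class under dunder names. A minimal, self-contained sketch of that pattern; all names here (Vector, make_arith_method, attach_methods) are hypothetical illustrations, not the pandas implementation:

import operator

def make_arith_method(op):
    # mirrors the _arith_method_* factories: close over `op` and return the method
    def method(self, other):
        other_vals = other.values if isinstance(other, Vector) else [other] * len(self.values)
        return Vector([op(a, b) for a, b in zip(self.values, other_vals)])
    method.__name__ = '__{}__'.format(op.__name__)
    return method

def attach_methods(cls):
    # mirrors add_methods: pin each generated method onto the class,
    # skipping names the class already defines itself
    for op in (operator.add, operator.sub, operator.mul):
        name = '__{}__'.format(op.__name__)
        if name not in cls.__dict__:
            setattr(cls, name, make_arith_method(op))
    return cls

@attach_methods
class Vector(object):
    def __init__(self, values):
        self.values = list(values)

    def __repr__(self):
        return 'Vector(%r)' % (self.values,)

# Vector([1, 2]) + Vector([3, 4]) evaluates to Vector([4, 6])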
import pytest import pandas as pd from pandas import TimedeltaIndex class TestTimedeltaIndexRendering: @pytest.mark.parametrize('method', ['__repr__', '__str__']) def test_representation(self, method): idx1 = TimedeltaIndex([], freq='D') idx2 = TimedeltaIndex(['1 days'], freq='D') idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D') idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D') idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days']) exp1 = """TimedeltaIndex([], dtype='timedelta64[ns]', freq='D')""" exp2 = ("TimedeltaIndex(['1 days'], dtype='timedelta64[ns]', " "freq='D')") exp3 = ("TimedeltaIndex(['1 days', '2 days'], " "dtype='timedelta64[ns]', freq='D')") exp4 = ("TimedeltaIndex(['1 days', '2 days', '3 days'], " "dtype='timedelta64[ns]', freq='D')") exp5 = ("TimedeltaIndex(['1 days 00:00:01', '2 days 00:00:00', " "'3 days 00:00:00'], dtype='timedelta64[ns]', freq=None)") with pd.option_context('display.width', 300): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = getattr(idx, method)() assert result == expected def test_representation_to_series(self): idx1 = TimedeltaIndex([], freq='D') idx2 = TimedeltaIndex(['1 days'], freq='D') idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D') idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D') idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days']) exp1 = """Series([], dtype: timedelta64[ns])""" exp2 = ("0 1 days\n" "dtype: timedelta64[ns]") exp3 = ("0 1 days\n" "1 2 days\n" "dtype: timedelta64[ns]") exp4 = ("0 1 days\n" "1 2 days\n" "2 3 days\n" "dtype: timedelta64[ns]") exp5 = ("0 1 days 00:00:01\n" "1 2 days 00:00:00\n" "2 3 days 00:00:00\n" "dtype: timedelta64[ns]") with pd.option_context('display.width', 300): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = repr(pd.Series(idx)) assert result == expected def test_summary(self): # GH#9116 idx1 = TimedeltaIndex([], freq='D') idx2 = TimedeltaIndex(['1 days'], freq='D') idx3 = TimedeltaIndex(['1 days', '2 days'], freq='D') idx4 = TimedeltaIndex(['1 days', '2 days', '3 days'], freq='D') idx5 = TimedeltaIndex(['1 days 00:00:01', '2 days', '3 days']) exp1 = ("TimedeltaIndex: 0 entries\n" "Freq: D") exp2 = ("TimedeltaIndex: 1 entries, 1 days to 1 days\n" "Freq: D") exp3 = ("TimedeltaIndex: 2 entries, 1 days to 2 days\n" "Freq: D") exp4 = ("TimedeltaIndex: 3 entries, 1 days to 3 days\n" "Freq: D") exp5 = ("TimedeltaIndex: 3 entries, 1 days 00:00:01 to 3 days " "00:00:00") for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = idx._summary() assert result == expected
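The repr assertions above rely on pd.option_context to pin display options for the duration of a block. A small standalone illustration of that public pandas API (the width value is arbitrary):

import pandas as pd

idx = pd.TimedeltaIndex(['1 days', '2 days'], freq='D')
with pd.option_context('display.width', 300):
    # inside the block, reprs may use the full 300-character width
    wide = repr(idx)
# on exit the previous display.width is restored automatically
assert 'TimedeltaIndex' in wide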
cbertinato/pandas
pandas/tests/indexes/timedeltas/test_formats.py
pandas/core/ops.py
# -*- coding: utf-8 -*- from collections import Mapping from copy import copy from functools import partial from cached_property import cached_property from navmazing import NavigateToSibling, NavigateToAttribute from widgetastic.utils import ParametrizedLocator, ParametrizedString from widgetastic.widget import Text, ParametrizedView from widgetastic_manageiq import Select from widgetastic_patternfly import Input, Button, Dropdown from cfme.base.login import BaseLoggedInPage import cfme.fixtures.pytest_selenium as sel import cfme.web_ui.accordion as accordion from cfme.web_ui import Form, Tree, fill, flash, form_buttons, match_location, Select as Select_old from utils.appliance import Navigatable from utils.appliance.implementations.ui import navigator, navigate_to, CFMENavigateStep from utils.pretty import Pretty from utils.update import Updateable from utils.version import LOWEST, pick assignment_tree = Tree("//div[@id='cb_assignments_treebox']/ul") match_page = partial(match_location, controller='chargeback', title='Chargeback') class ChargebackView(BaseLoggedInPage): @property def in_chargeback(self): return ( self.logged_in_as_current_user and self.navigation.currently_selected == ['Cloud Intel', 'Chargeback']) @property def is_displayed(self): return ( self.in_chargeback and self.title.text == 'Compute Chargeback Rates') configuration = Dropdown('Configuration') class AddComputeChargebackView(ChargebackView): title = Text('#explorer_title_text') description = Input(id='description') currency = Select(id='currency') @ParametrizedView.nested class fields(ParametrizedView): # noqa PARAMETERS = ('name',) ROOT = ParametrizedLocator('.//tr[./td[contains(normalize-space(.), {name|quote})]]') @cached_property def row_id(self): attr = self.browser.get_attribute( 'id', './td/select[starts-with(@id, "per_time_")]', parent=self) return int(attr.rsplit('_', 1)[-1]) @cached_property def sub_row_id(self): attr = self.browser.get_attribute( 'id', './td/input[starts-with(@id, "fixed_rate_")]', parent=self) return int(attr.rsplit('_', 1)[-1]) per_time = Select(id=ParametrizedString('per_time_{@row_id}')) per_unit = Select(id=ParametrizedString('per_unit_{@row_id}')) start = Input(id=ParametrizedString('start_{@row_id}_{@sub_row_id}')) finish = Input(id=ParametrizedString('finish_{@row_id}_{@sub_row_id}')) fixed_rate = Input(id=ParametrizedString('fixed_rate_{@row_id}_{@sub_row_id}')) variable_rate = Input(id=ParametrizedString('variable_rate_{@row_id}_{@sub_row_id}')) action_add = Button(title='Add a new tier') action_delete = Button(title='Remove the tier') add_button = Button(title='Add') cancel_button = Button(title='Cancel') @property def is_displayed(self): return ( self.in_explorer and self.title.text == 'Compute Chargeback Rates' and self.description.is_displayed) class EditComputeChargebackView(AddComputeChargebackView): save_button = Button(title='Save Changes') reset_button = Button(title='Reset Changes') @property def is_displayed(self): return ( self.in_explorer and self.title.text == 'Compute Chargeback Rate "{}"'.format(self.obj.description)) class StorageChargebackView(ChargebackView): @property def is_displayed(self): return ( self.in_chargeback and self.title.text == 'Storage Chargeback Rates') class AddStorageChargebackView(AddComputeChargebackView): @property def is_displayed(self): return ( self.in_explorer and self.title.text == 'Storage Chargeback Rates' and self.description.is_displayed) class EditStorageChargebackView(EditComputeChargebackView): @property def 
is_displayed(self): return ( self.in_explorer and self.title.text == 'Storage Chargeback Rate "{}"'.format(self.obj.description)) class AssignFormTable(Pretty): pretty_attrs = ["entry_loc"] def __init__(self, entry_loc): self.entry_loc = entry_loc def locate(self): return self.entry_loc @property def rows(self): return sel.elements("./tbody/tr", root=self) def row_by_name(self, name): for row in self.rows: row_name = sel.text_sane(sel.element("./td[1]", root=row)) if row_name == name: return row else: raise NameError("Did not find row named {}!".format(name)) def select_from_row(self, row): el = pick({"5.6": "./td/select", "5.7": "./td/div/select"}) return Select_old(sel.element(el, root=row)) def select_by_name(self, name): return self.select_from_row(self.row_by_name(name)) @fill.method((AssignFormTable, Mapping)) def _fill_assignform_dict(form, d): d = copy(d) # Mutable for name, value in d.iteritems(): if value is None: value = "<Nothing>" select = form.select_by_name(name) sel.select(select, value) assign_form = Form( fields=[ ("assign_to", Select_old("select#cbshow_typ")), # Enterprise ("enterprise", Select_old("select#enterprise__1")), # Simple shotcut, might explode once # Tagged DS ("tag_category", Select_old("select#cbtag_cat")), # Docker Labels ("docker_labels", Select_old('select#cblabel_key')), # Common - selection table ("selections", AssignFormTable({ LOWEST: ( "//div[@id='cb_assignment_div']/fieldset/table[contains(@class, 'style1')]" "/tbody/tr/td/table"), "5.4": "//div[@id='cb_assignment_div']/table[contains(@class, 'table')]", })), ('save_button', form_buttons.save)]) class ComputeRate(Updateable, Pretty, Navigatable): pretty_attrs = ['description'] def __init__(self, description=None, currency=None, fields=None, appliance=None, ): Navigatable.__init__(self, appliance=appliance) self.description = description self.currency = currency self.fields = fields def __getitem__(self, name): return self.fields.get(name, None) def create(self): view = navigate_to(self, 'New') view.fill_with({'description': self.description, 'currency': self.currency, 'fields': self.fields}, on_change=view.add_button, no_change=view.cancel_button) view.flash.assert_success_message('Chargeback Rate "{}" was added'.format( self.description)) def update(self, updates): view = navigate_to(self, 'Edit') view.fill_with(updates, on_change=view.save_button, no_change=view.cancel_button) view.flash.assert_success_message('Chargeback Rate "{}" was saved'.format( updates.get('description'))) def delete(self): view = navigate_to(self, 'Details') view.configuration.item_select('Remove from the VMDB', handle_alert=True) view.flash.assert_success_message('Chargeback Rate "{}": Delete successful'.format( self.description)) @navigator.register(ComputeRate, 'All') class ComputeRateAll(CFMENavigateStep): VIEW = ChargebackView prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def step(self): self.prerequisite_view.navigation.select('Cloud Intel', 'Chargeback') accordion.tree("Rates", "Rates", "Compute") @navigator.register(ComputeRate, 'New') class ComputeRateNew(CFMENavigateStep): VIEW = AddComputeChargebackView prerequisite = NavigateToSibling('All') def step(self): self.view.configuration.item_select("Add a new Chargeback Rate") @navigator.register(ComputeRate, 'Details') class ComputeRateDetails(CFMENavigateStep): VIEW = ChargebackView prerequisite = NavigateToSibling('All') def step(self): accordion.tree("Rates", "Rates", "Compute", self.obj.description) @navigator.register(ComputeRate, 'Edit') 
class ComputeRateEdit(CFMENavigateStep): VIEW = EditComputeChargebackView prerequisite = NavigateToSibling('Details') def step(self): self.view.configuration.item_select("Edit this Chargeback Rate") class StorageRate(ComputeRate, Updateable, Pretty, Navigatable): # Identical methods and form as ComputeRate, but different navigation pretty_attrs = ['description'] @navigator.register(StorageRate, 'All') class StorageRateAll(CFMENavigateStep): VIEW = StorageChargebackView prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def step(self): self.prerequisite_view.navigation.select('Cloud Intel', 'Chargeback') accordion.tree("Rates", "Rates", "Storage") @navigator.register(StorageRate, 'New') class StorageRateNew(CFMENavigateStep): VIEW = AddStorageChargebackView prerequisite = NavigateToSibling('All') def step(self): self.view.configuration.item_select("Add a new Chargeback Rate") @navigator.register(StorageRate, 'Details') class StorageRateDetails(CFMENavigateStep): VIEW = ChargebackView prerequisite = NavigateToSibling('All') def step(self): accordion.tree("Rates", "Rates", "Storage", self.obj.description) @navigator.register(StorageRate, 'Edit') class StorageRateEdit(CFMENavigateStep): VIEW = EditStorageChargebackView prerequisite = NavigateToSibling('Details') def step(self): self.view.configuration.item_select("Edit this Chargeback Rate") class Assign(Updateable, Pretty, Navigatable): """ Model of Chargeback Assignment page in cfme. Args: assign_to: Assign the chargeback rate to entities such as VM, Provider, datastore, or the Enterprise itself. tag_category: Tag category of the entity selections: Selection of a particular entity to which the rate is to be assigned. E.g. if the chargeback rate is to be assigned to providers, select which of the managed providers the rate is to be assigned.
Usage: tagged_datastore = Assign( assign_to="Tagged Datastores", tag_category="Location", selections={ "Chicago": "Default" }) tagged_datastore.storageassign() """ def __init__(self, assign_to=None, tag_category=None, docker_labels=None, selections=None, appliance=None): Navigatable.__init__(self, appliance=appliance) self.assign_to = assign_to self.tag_category = tag_category self.docker_labels = docker_labels self.selections = selections def storageassign(self): navigate_to(self, 'Storage') fill(assign_form, {'assign_to': self.assign_to, 'tag_category': self.tag_category, 'selections': self.selections}, action=assign_form.save_button) flash.assert_no_errors() def computeassign(self): navigate_to(self, 'Compute') fill(assign_form, {'assign_to': self.assign_to, 'tag_category': self.tag_category, 'docker_labels': self.docker_labels, 'selections': self.selections}, action=assign_form.save_button) flash.assert_no_errors() @navigator.register(Assign, 'All') class AssignAll(CFMENavigateStep): prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def step(self): self.prerequisite_view.navigation.select('Cloud Intel', 'Chargeback') accordion.tree("Rates", "Rates", "Storage") @navigator.register(Assign, 'Storage') class AssignStorage(CFMENavigateStep): prerequisite = NavigateToSibling('All') def step(self): accordion.tree("Assignments", "Assignments", "Storage") def am_i_here(self): return match_page(summary='Storage Rate Assignments') @navigator.register(Assign, 'Compute') class AssignCompute(CFMENavigateStep): prerequisite = NavigateToSibling('All') def step(self): accordion.tree("Assignments", "Assignments", "Compute") def am_i_here(self): return match_page(summary='Compute Rate Assignments')
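The @navigator.register(...) decorators above key a navigation-step class to a (model class, destination name) pair, and navigate_to later looks the step up and walks its prerequisite chain. A minimal sketch of that registry pattern; the names NavRegistry, Rate, and go are hypothetical stand-ins, not the real utils.appliance machinery:

class NavRegistry(object):
    def __init__(self):
        self._steps = {}

    def register(self, model_cls, name):
        # used as @nav.register(Model, 'Destination') on a step class
        def decorator(step_cls):
            self._steps[(model_cls, name)] = step_cls
            return step_cls
        return decorator

    def navigate(self, obj, name):
        # dispatch on the object's type plus the destination name
        step_cls = self._steps[(type(obj), name)]
        return step_cls().go(obj)

nav = NavRegistry()

class Rate(object):
    pass

@nav.register(Rate, 'Details')
class RateDetails(object):
    def go(self, obj):
        return 'at Details view of %r' % (obj,)

# nav.navigate(Rate(), 'Details') dispatches through the registry to RateDetails.go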
# -*- coding: utf-8 -*- import pytest from cfme.common.provider import cleanup_vm from cfme.infrastructure.provider.virtualcenter import VMwareProvider from cfme.services.catalogs.service_catalogs import ServiceCatalogs from cfme.services import requests from cfme import test_requirements from utils import testgen from utils.wait import wait_for pytestmark = [ pytest.mark.meta(server_roles="+automate"), pytest.mark.usefixtures('vm_name', 'uses_infra_providers', 'catalog_item'), pytest.mark.long_running, test_requirements.service, pytest.mark.tier(3) ] pytest_generate_tests = testgen.generate([VMwareProvider], scope="module") def test_copy_request(setup_provider, provider, catalog_item, request): """Automate BZ 1194479""" vm_name = catalog_item.provisioning_data["vm_name"] request.addfinalizer(lambda: cleanup_vm(vm_name + "_0001", provider)) catalog_item.create() service_catalogs = ServiceCatalogs(catalog_item.catalog, catalog_item.name) service_catalogs.order() row_description = catalog_item.name cells = {'Description': row_description} row, __ = wait_for(requests.wait_for_request, [cells, True], fail_func=requests.reload, num_sec=1800, delay=20) requests.go_to_request(cells)
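The test above blocks on wait_for(requests.wait_for_request, [cells, True], fail_func=requests.reload, num_sec=1800, delay=20). A hypothetical, simplified stand-in showing the polling contract implied by that call; the real utils.wait.wait_for is richer than this sketch:

import time

def wait_for(func, func_args=(), fail_func=None, num_sec=60, delay=5):
    # poll func(*func_args) until it returns something truthy or time runs out
    deadline = time.time() + num_sec
    while time.time() < deadline:
        result = func(*func_args)
        if result:
            # mirror the `row, __ = wait_for(...)` tuple unpacking used above
            return result, num_sec - (deadline - time.time())
        if fail_func is not None:
            fail_func()  # e.g. reload the requests page between polls
        time.sleep(delay)
    raise RuntimeError('condition not met within %s seconds' % num_sec)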
rlbabyuk/integration_tests
cfme/tests/services/test_request.py
cfme/intelligence/chargeback.py
#!/usr/bin/env python # encoding: utf-8 # # pmatic - Python API for Homematic. Easy to use. # Copyright (C) 2016 Lars Michelsen <lm@larsmichelsen.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """Implements the main components of the pmatic manager""" # Add Python 3.x behaviour to 2.7 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals try: from builtins import object # pylint:disable=redefined-builtin except ImportError: pass try: # Python 2.x import __builtin__ as builtins except ImportError: # Python 3+ import builtins import os import re import cgi import sys import time import json import uuid import socket import signal import inspect import traceback import threading import contextlib import subprocess import wsgiref.simple_server from hashlib import sha256 import pytz try: from grp import getgrnam from pwd import getpwnam except ImportError: # don't raise missing e.g. grp module on windows platform (does not exist) # The manager is not working on windows. At least allow to import it for tests. if sys.platform == "win32": pass try: from Cookie import SimpleCookie except ImportError: from http.cookies import SimpleCookie try: import Queue as queue except ImportError: import queue try: from StringIO import StringIO except ImportError: from io import StringIO try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse import pmatic import pmatic.utils as utils from pmatic.exceptions import PMUserError, SignalReceived, PMException from pmatic.residents import Residents, Resident, PersonalDevice, PersonalDeviceFritzBoxHost # Set while a script is executed with the "/run" page g_runner = None class Config(utils.LogMixin): config_path = "/etc/config/addons/pmatic/etc" state_path = "/var/lib/pmatic" # not reboot persistent! script_path = "/etc/config/addons/pmatic/scripts" static_path = "/etc/config/addons/pmatic/manager_static" ccu_enabled = True ccu_address = None ccu_credentials = None log_level = "INFO" log_file = "/var/log/pmatic-manager.log" timezone = "Europe/Berlin" event_history_length = 1000 presence_update_interval = 60 # seconds pushover_api_token = None pushover_user_token = None fritzbox_enabled = False fritzbox_address = "fritz.box" fritzbox_port = 49000 fritzbox_username = "" fritzbox_password = "" @classmethod def load(cls): try: try: config = json.load(open(cls._config_path())) except IOError as e: # a non existing file is allowed. if e.errno == 2: config = {} else: raise except Exception: cls.cls_logger().error("Failed to load the config. 
Terminating.", exc_info=True) sys.exit(1) for key, val in config.items(): setattr(cls, key, val) @classmethod def save(cls): config = {} for key, val in cls.__dict__.items(): if key[0] != "_" and key not in [ "config_path", "script_path", "static_path", "log_file" ] \ and not inspect.isroutine(val): config[key] = val if not os.path.exists(os.path.dirname(cls._config_path())): os.makedirs(os.path.dirname(cls._config_path())) json_config = json.dumps(config) open(cls._config_path(), "w").write(json_config + "\n") @classmethod def _config_path(cls): return cls.config_path + "/manager.config" class Html(object): html_escape_table = { "&": "&amp;", '"': "&quot;", "'": "&apos;", ">": "&gt;", "<": "&lt;", } def __init__(self): super(Html, self).__init__() self._form_vars = [] def page_header(self): self.write('<!DOCTYPE HTML>\n' '<html><head>\n') self.write("<meta http-equiv=\"Content-Type\" " "content=\"text/html; charset=utf-8\">\n") self.write("<meta http-equiv=\"X-UA-Compatible\" " "content=\"IE=edge\">\n") self.write("<link rel=\"stylesheet\" href=\"/css/font-awesome.min.css\">\n") self.write("<link rel=\"stylesheet\" href=\"/css/pmatic.css\">\n") self.write("<link rel=\"shortcut icon\" href=\"/favicon.ico\">\n") self.write('<title>%s</title>\n' % self.escape(self.title())) self.write('</head>\n') self.write("<body>\n") def page_footer(self): self.write("</body>") self.write("</html>") def navigation(self): self.write("<ul id=\"navigation\">\n") self.write("<li><a href=\"/\"><i class=\"fa fa-code\"></i>Scripts</a></li>\n") self.write("<li><a href=\"/run\"><i class=\"fa fa-flash\"></i>Execute Scripts</a></li>\n") self.write("<li><a href=\"/schedule\"><i class=\"fa fa-tasks\"></i>Schedule</a></li>\n") self.write("<li><a href=\"/residents\"><i class=\"fa fa-users\"></i>Residents</a></li>\n") self.write("<li><a href=\"/event_log\"><i class=\"fa fa-list\"></i>Event Log</a></li>\n") self.write("<li><a href=\"/config\"><i class=\"fa fa-gear\"></i>Configuration</a></li>\n") self.write("<li class=\"right\"><a href=\"https://larsmichelsen.github.io/pmatic/\" " "target=\"_blank\">pmatic %s</a></li>\n" % self.escape(pmatic.__version__)) self.write("<li class=\"right\"><a href=\"/state\"><i class=\"fa fa-heart\"></i>" "State</a></li>\n") self.write("</ul>\n") def is_action(self): return bool(self._vars.getvalue("action")) def is_checked(self, name): return self._vars.getvalue(name) is not None def add_missing_vars(self): """Adds the vars which have been used to call this page but are yet missing in the current form as hidden vars to the form.""" for key in self._vars.keys(): if key not in self._form_vars: self.hidden(key, self._vars.getvalue(key)) def begin_form(self, multipart=None): self._form_vars = [] enctype = " enctype=\"multipart/form-data\"" if multipart else "" target_url = self.url or "/" self.write("<form method=\"post\" action=\"%s\" %s>\n" % (self.escape(target_url), enctype)) def end_form(self): self.write("</form>\n") def file_upload(self, name, accept="text/*"): self._form_vars.append(name) self.write("<input name=\"%s\" type=\"file\" accept=\"%s\">" % (self.escape(name), self.escape(accept))) def hidden(self, name, value): self._form_vars.append(name) self.write("<input type=\"hidden\" name=\"%s\" value=\"%s\">\n" % (self.escape(name), self.escape(value))) def password(self, name): self._form_vars.append(name) self.write("<input type=\"password\" name=\"%s\">\n" % self.escape(name)) def submit(self, label, value="1", name="action"): self._form_vars.append(name) self.write("<button 
type=\"submit\" name=\"%s\" " "value=\"%s\">%s</button>\n" % (self.escape(name), self.escape(value), self.escape(label))) def input(self, name, deflt=None, cls=None): self._form_vars.append(name) value = deflt if deflt is not None else "" css = (" class=\"%s\"" % self.escape(cls)) if cls else "" self.write("<input type=\"text\" name=\"%s\" value=\"%s\"%s>\n" % (self.escape(name), self.escape(value), css)) def checkbox(self, name, deflt=False): self._form_vars.append(name) checked = " checked" if deflt else "" self.write("<input type=\"checkbox\" name=\"%s\"%s>\n" % (self.escape(name), self.escape(checked))) def select(self, name, choices, deflt=None, onchange=None): self._form_vars.append(name) onchange = " onchange=\"%s\"" % self.escape(onchange) if onchange else "" self.write("<select name=\"%s\"%s>\n" % (self.escape(name), onchange)) self.write("<option value=\"\"></option>\n") for choice in choices: if deflt == choice[0]: selected = " selected" else: selected = "" self.write("<option value=\"%s\"%s>%s</option>\n" % (self.escape(choice[0]), selected, self.escape(choice[1]))) self.write("</select>\n") def icon(self, icon_name, title, cls=None): css = " " + cls if cls else "" self.write("<i class=\"fa fa-%s%s\" title=\"%s\"></i>" % (self.escape(icon_name), self.escape(css), self.escape(title))) def icon_button(self, icon_name, url, title): self.write("<a class=\"icon_button\" href=\"%s\">" % self.escape(url)) self.icon(icon_name, title) self.write("</a>") def button(self, icon_name, label, url): self.write("<a class=\"button\" href=\"%s\">" % self.escape(url)) if icon_name is not None: self.icon(icon_name, "") self.write(self.escape(label)) self.write("</a>\n") def error(self, text): self.message(text, "error", "bomb") def success(self, text): self.message(text, "success", "check-circle-o") def info(self, text): self.message(text, "info", "info-circle") def message(self, text, cls, icon): self.write("<div class=\"message %s\"><i class=\"fa fa-2x fa-%s\"></i> " "%s</div>\n" % (self.escape(cls), self.escape(icon), text)) def confirm(self, text): confirm = self._vars.getvalue("_confirm") if not confirm: self.begin_form() self.message(text, "confirm", "question-circle") self.submit("Yes", "yes", name="_confirm") self.button(None, "No", "javascript:window.history.back()") self.add_missing_vars() self.end_form() return False elif confirm == "yes": return True def h2(self, title): self.write("<h2>%s</h2>\n" % self.escape(title)) def h3(self, title): self.write("<h3>%s</h3>\n" % self.escape(title)) # FIXME: Escaping - Needs to be escaped here or at all callers def p(self, content): self.write("<p>%s</p>\n" % content) def js_file(self, url): self.write("<script type=\"text/javascript\" src=\"%s\"></script>\n" % self.escape(url)) def js(self, script): self.write("<script type=\"text/javascript\">%s</script>\n" % script) def redirect(self, delay, url): self.js("setTimeout(\"location.href = '%s';\", %d);" % (url, delay*1000)) def escape(self, text): """Escape text for embedding into HTML code.""" if not utils.is_string(text): text = "%s" % text return "".join(self.html_escape_table.get(c, c) for c in text) def write_text(self, text): self.write(self.escape(text)) class FieldStorage(cgi.FieldStorage): def getvalue(self, key, default=None): value = cgi.FieldStorage.getvalue(self, key.encode("utf-8"), default) if value is not None: return value.decode("utf-8") else: return None class PageHandler(utils.LogMixin): _transids = {} @classmethod def pages(cls): pages = {} for subclass in cls.__subclasses__() 
+ HtmlPageHandler.__subclasses__(): if hasattr(subclass, "url"): pages[subclass.url] = subclass return pages @classmethod def base_url(self, environ): parts = environ['PATH_INFO'].lstrip("/").split("/") return parts[0] @classmethod def is_password_set(self): return os.path.exists(os.path.join(Config.config_path, "manager.secret")) @classmethod def _get_auth_cookie_value(self, environ): for name, cookie in SimpleCookie(environ.get("HTTP_COOKIE")).items(): if name == "pmatic_auth": return cookie.value @classmethod def _is_authenticated(self, environ): value = self._get_auth_cookie_value(environ) if not value or value.count(":") != 1: return False salt, salted_hash = value.split(":", 1) filepath = os.path.join(Config.config_path, "manager.secret") secret = open(filepath).read().strip() to_hash = secret + salt if not utils.is_py2(): to_hash = to_hash.encode("utf-8") correct_hash = sha256(to_hash).hexdigest() return salted_hash == correct_hash @classmethod def get(cls, environ): pages = cls.pages() try: page = pages[cls.base_url(environ)] if cls.is_password_set() and not cls._is_authenticated(environ): return pages["login"] else: return page except KeyError: static_file_class = StaticFile.get(environ['PATH_INFO']) if not static_file_class: return pages["404"] else: return static_file_class def __init__(self, manager, environ, start_response): super(PageHandler, self).__init__() self._manager = manager self._env = environ self._start_response = start_response self._http_headers = [] self._set_http_header("Content-type", self._get_content_type()) self._page = [] self._vars = cgi.FieldStorage() self._read_environment() self._is_valid_transaction = False self._check_transaction() def _referer(self): """Returns the value of the HTTP_REFERER HTTP header (if available). Otherwise an empty string is returned.""" return self._env.get("HTTP_REFERER", "") def _request_url(self): """Returns the current URL requested by the client. Maybe not exactly the URL the user requested, but sufficient for us.""" url = self._env["PATH_INFO"] if self._env.get("QUERY_STRING"): url += "?" + self._env["QUERY_STRING"] return url def _origin_url(self): """Returns parts of the referer URL. It is constructed equal to :meth:`_reuqest_url` and used during transaction id validation""" parts = urlparse(self._referer()) url = parts.path if parts.query: url += "?" + parts.query return url def _new_transid(self): """Calculates a new random transaction id, adds it to the transaction id store and returns it to the caller.""" transid = uuid.uuid4().get_hex().lower()[:6] self._transids[transid] = (time.time(), self._request_url()) return transid def _cleanup_transids(self): """Removes old, unused transaction ids that are older than 2 hours.""" for transid, (issue_time, url) in self._transids.items(): if issue_time > 7200: self._invalidate_transid(transid) def _invalidate_transid(self, transid): del self._transids[transid] def _check_transaction(self): """Checks whether or not this request is a valid transaction and sets the state in this object for later use.""" transid = self._vars.getvalue("_transid") if not transid: return False issue_time, url = self._transids.get(transid, (None, None)) if issue_time == None and url == None: return False self._is_valid_transaction = url == self._origin_url() self._invalidate_transid(transid) def _transid_valid(self): return self._is_valid_transaction def action_url(self, url): if "?" in url: url += "&" else: url += "?" 
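# appending a fresh transaction id lets the request triggered by this URL pass _check_transaction() above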
return url + "_transid=%s" % self._new_transid() def _get_content_type(self): return "text/html; charset=UTF-8" def _read_environment(self): self._read_vars() def _set_cookie(self, name, value): cookie = SimpleCookie() cookie[name.encode("utf-8")] = value.encode("utf-8") self._set_http_header("Set-Cookie", cookie[name.encode("utf-8")].OutputString()) @property def vars(self): return self._vars def _read_vars(self): wsgi_input = self._env["wsgi.input"] if not wsgi_input: return self._vars = FieldStorage(fp=wsgi_input, environ=self._env, keep_blank_values=1) def _send_http_header(self): self._start_response(self._http_status(200), self._http_headers) def _set_http_header(self, k, v): if utils.is_py2(): self._http_headers.append((k.encode("utf-8"), v.encode("utf-8"))) else: self._http_headers.append((k, v)) def process_page(self): self._send_http_header() self.page_header() self.navigation() self.write("<div id=\"content\">\n") action_result = None if self.is_action() and self._transid_valid(): try: action_result = self.action() except PMUserError as e: self.error(e) except Exception as e: self.error("Unhandled exception: %s" % e) self.logger.debug("Unhandled exception (action)", exc_info=True) # The action code can disable regular rendering of the page, # e.g. to only show a confirmation dialog. if action_result != False: try: self.process() except PMUserError as e: self.error(e) except Exception as e: self.error("Unhandled exception: %s" % e) self.logger.debug("Unhandled exception (process)", exc_info=True) self.write("\n</div>") self.page_footer() return [b"".join(self._page)] def ensure_password_is_set(self): if not self.is_password_set(): raise PMUserError("To be able to access this page you first have to " "<a href=\"/config\">set a password</a> and authenticate " "afterwards.") def title(self): return "No title specified" def action(self): self.write_text("Not implemented yet.") def process(self): self.write_text("Not implemented yet.") def write(self, code): if utils.is_text(code): code = code.encode("utf-8") self._page.append(code) def _http_status(self, code): if code == 200: txt = '200 OK' elif code == 301: txt = '301 Moved Permanently' elif code == 302: txt = '302 Found' elif code == 304: txt = '304 Not Modified' elif code == 404: txt = '404 Not Found' elif code == 500: txt = '500 Internal Server Error' else: txt = '%d' % code if utils.is_py2(): return txt.encode("utf-8") else: return txt class HtmlPageHandler(PageHandler, Html): def begin_form(self, multipart=None): super(HtmlPageHandler, self).begin_form(multipart) self._add_transid_field() def _add_transid_field(self): self.hidden("_transid", self._new_transid()) class StaticFile(PageHandler): @classmethod def get(self, path_info): if ".." in path_info: return # don't allow .. 
in paths to prevent opening of unintended files if path_info.startswith("/css/") or path_info.startswith("/fonts/") \ or path_info.startswith("/scripts/") or path_info.startswith("/js/") \ or path_info == "/favicon.ico": file_path = StaticFile.system_path_from_pathinfo(path_info) if os.path.exists(file_path): return StaticFile @classmethod def system_path_from_pathinfo(self, path_info): if path_info.startswith("/scripts/"): return os.path.join(Config.script_path, path_info[9:]) else: return os.path.join(Config.static_path, path_info.lstrip("/")) def _get_content_type(self): ext = self._env["PATH_INFO"].split(".")[-1] if ext == "css": return "text/css; charset=UTF-8" elif ext == "js": return "application/x-javascript; charset=UTF-8" elif ext == "otf": return "application/vnd.ms-opentype" elif ext == "eot": return "application/vnd.ms-fontobject" elif ext == "ttf": return "application/x-font-ttf" elif ext == "woff": return "application/octet-stream" elif ext == "woff2": return "application/octet-stream" elif ext == "ico": return "image/x-icon" else: return "text/plain; charset=UTF-8" def _check_cached(self, file_path): client_cached_age = self._env.get('HTTP_IF_MODIFIED_SINCE', None) if not client_cached_age: return False # Try to parse the If-Modified-Since HTTP header provided by # the client to make client cache usage possible. try: t = time.strptime(client_cached_age, "%a, %d %b %Y %H:%M:%S %Z") if t == time.gmtime(os.stat(file_path).st_mtime): return True except ValueError: # strptime raises ValueError when wrong time format is being provided return False def process_page(self): path_info = self._env["PATH_INFO"] file_path = StaticFile.system_path_from_pathinfo(self._env["PATH_INFO"]) is_cached = self._check_cached(file_path) if is_cached: self._start_response(self._http_status(304), []) return [] mtime = os.stat(file_path).st_mtime self._set_http_header("Last-Modified", time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(mtime))) if path_info.startswith("/scripts"): self._set_http_header("Content-Disposition", "attachment; filename=\"%s\"" % os.path.basename(path_info)) self._start_response(self._http_status(200), self._http_headers) return [ l for l in open(file_path, "r") ] class AbstractScriptProgressPage(Html): def __init__(self): super(AbstractScriptProgressPage, self).__init__() self._runner = None def _abort_url(self): raise NotImplementedError() def _handle_abort(self): if not self._is_running(): raise PMUserError("There is no script running to abort.") self._runner.abort() self.success("The script has been aborted.") def _progress(self): self.h2(self.title()) if not self._is_started(): self.p("There is no script running.") return runner = self._runner self.write("<table>") self.write("<tr><th>Script</th>" "<td>%s</td></tr>" % self.escape(self._runner.script)) self.write("<tr><th>Started at</th>" "<td>%s</td></tr>" % time.strftime("%Y-%m-%d %H:%M:%S", utils.localtime(runner.started, Config.timezone))) self.write("<tr><th>Finished at</th>" "<td>") if not self._is_running() and runner.finished is not None: self.write_text(time.strftime("%Y-%m-%d %H:%M:%S", utils.localtime(runner.finished, Config.timezone))) else: self.write_text("-") self.write("</td></tr>") self.write("<tr><th>Current state</th>" "<td>") if self._is_running(): self.icon("spinner", "The script is running...", cls="fa-pulse") self.write_text(" Running... 
") if runner.abortable: self.icon_button("close", self._abort_url(), "Stop this script.") elif runner.exit_code is not None: if runner.exit_code == 0: self.icon("check", "Successfully finished") else: self.icon("times", "An error occured") self.write(" Finished (Exit code: <tt>%d</tt>)" % runner.exit_code) else: self.icon("times", "Not running") self.write_text(" Started but not running - something went wrong.") self.write("</td></tr>") self.write("<tr><th class=\"toplabel\">Output</th>") self.write("<td>") output = self.escape(self._output()) or "<i>(no output)</i>" self.write("<pre id=\"output\">%s</pre>" % output) self.write("</td></tr>") self.write("</table>") if self._is_running(): self.js_file("js/update_output.js") def _set_runner(self, runner): self._runner = runner def _is_started(self): return self._runner is not None def _is_running(self): return self._runner and self._runner.is_running def _exit_code(self): return self._runner.exit_code def _output(self): return "".join(self._runner.output.getvalue()) class PageMain(HtmlPageHandler, utils.LogMixin): url = "" def title(self): return "Manage pmatic Scripts" def save_script(self, filename, script): if not os.path.exists(Config.script_path): os.makedirs(Config.script_path) filepath = os.path.join(Config.script_path, filename) open(filepath, "w").write(script) os.chmod(filepath, 0o755) def action(self): self.ensure_password_is_set() action = self._vars.getvalue("action") if action == "upload": self._handle_upload() elif action == "delete": return self._handle_delete() def _handle_upload(self): if not self._vars.getvalue("script"): raise PMUserError("You need to select a script to upload.") filename = self._vars["script"].filename script = self._vars["script"].file.read() first_line = script.split(b"\n", 1)[0] if not first_line.startswith(b"#!/usr/bin/python") \ and not first_line.startswith(b"#!/usr/bin/env python"): raise PMUserError("The uploaded file does not seem to be a pmatic script.") if len(script) > 1048576: raise PMUserError("The uploaded file is too large.") self.save_script(filename, script) self.success("The script has been uploaded.") def _handle_delete(self): filename = self._vars.getvalue("script") if not self.confirm("Do you really want to delete the script %s?" % filename): return False if not filename: raise PMUserError("You need to provide a script name to delete.") if filename not in self._manager.get_scripts(): raise PMUserError("This script does not exist.") filepath = os.path.join(Config.script_path, filename) os.unlink(filepath) self.success("The script has been deleted.") def process(self): self.ensure_password_is_set() self.upload_form() self.scripts() def upload_form(self): self.h2("Upload Script") self.p("You can either upload your scripts using this form or " "copy the files on your own, e.g. using SFTP or SCP, directly " "to <tt>%s</tt>." 
% self.escape(Config.script_path))
        self.p("Please note that existing scripts with equal names will be overwritten "
               "without warning.")
        self.write("<div class=\"upload_form\">\n")
        self.begin_form(multipart=True)
        self.file_upload("script")
        self.submit("Upload script", "upload")
        self.end_form()
        self.write("</div>\n")

    def scripts(self):
        self.h2("Scripts")
        self.write("<div class=\"scripts\">\n")
        self.write("<table><tr>\n")
        self.write("<th>Actions</th>"
                   "<th class=\"largest\">Filename</th>"
                   "<th>Last modified</th></tr>\n")
        for filename in self._manager.get_scripts():
            path = os.path.join(Config.script_path, filename)
            last_mod_ts = os.stat(path).st_mtime
            self.write("<tr>")
            self.write("<td>")
            self.icon_button("trash", self.action_url("?action=delete&script=%s" % filename),
                             "Delete this script")
            self.icon_button("bolt", self.action_url("/run?script=%s&action=run" % filename),
                             "Execute this script now")
            self.icon_button("download", "/scripts/%s" % filename,
                             "Download this script")
            self.write("</td>")
            self.write("<td>%s</td>" % self.escape(filename))
            self.write("<td>%s</td>" % time.strftime("%Y-%m-%d %H:%M:%S",
                                            utils.localtime(last_mod_ts, Config.timezone)))
            self.write("</tr>")
        self.write("</table>\n")
        self.write("</div>\n")


class PageRun(HtmlPageHandler, AbstractScriptProgressPage, utils.LogMixin):
    url = "run"

    def title(self):
        return "Execute pmatic Scripts"

    def _abort_url(self):
        return self.action_url("/run?action=abort")

    def action(self):
        self.ensure_password_is_set()
        action = self._vars.getvalue("action")
        if action == "run":
            self._handle_run()
        elif action == "abort":
            self._set_runner(g_runner)
            self._handle_abort()

    def _handle_run(self):
        script = self._vars.getvalue("script")
        if not script:
            raise PMUserError("You have to select a script.")

        if script not in self._manager.get_scripts():
            raise PMUserError("You have to select a valid script.")

        run_inline = self.is_checked("run_inline")

        if self._is_running():
            raise PMUserError("There is another script running. Wait for it to complete "
                              "or stop it to be able to execute another script.")

        self._execute_script(script, run_inline)
        self.success("The script has been started.")

    def process(self):
        self.ensure_password_is_set()
        self._start_form()
        self._set_runner(g_runner)
        self._progress()

    def _start_form(self):
        self.h2("Execute Scripts")
        self.p("This page is primarily meant for testing purposes. You can choose "
               "which script you would like to execute and then start it. The whole output of "
               "the script is captured and shown in the progress area below. You "
               "can execute only one script at a time. 
Please note: You are totally "
               "free to execute your scripts on the command line or however you like.")
        self.write("<div class=\"execute_form\">\n")
        self.begin_form()
        self.write_text("Select the script: ")
        self.select("script", sorted([ (s, s) for s in self._manager.get_scripts() ]),
                    deflt=self._vars.getvalue("script"))
        self.write_text("Run inline: ")
        self.checkbox("run_inline", self.is_checked("run_inline"))
        self.submit("Run script", "run")
        self.end_form()
        self.write("</div>\n")

    def _execute_script(self, script, run_inline):
        global g_runner
        g_runner = ScriptRunner(self._manager, script, run_inline=run_inline)
        g_runner.start()


class PageAjaxUpdateOutput(HtmlPageHandler, utils.LogMixin):
    url = "ajax_update_output"

    def _get_content_type(self):
        return "text/plain; charset=UTF-8"

    def process_page(self):
        output = []
        self._start_response(self._http_status(200), self._http_headers)
        if not g_runner:
            return output

        # Tell js code to continue reloading or not
        if not g_runner.is_running:
            self.write_text("0")
        else:
            self.write_text("1")

        self.write_text("".join(g_runner.output))

        return [b"".join(self._page)]


class PageLogin(HtmlPageHandler, utils.LogMixin):
    url = "login"

    def title(self):
        return "Log in to pmatic Manager"

    def action(self):
        password = self._vars.getvalue("password")
        if not password:
            raise PMUserError("Invalid password.")

        filepath = os.path.join(Config.config_path, "manager.secret")
        secret = open(filepath).read().strip()

        # sha256() expects bytes, so encode the submitted password on Python 3.
        if not utils.is_py2():
            password = password.encode("utf-8")
        if secret != sha256(password).hexdigest():
            raise PMUserError("Invalid password.")

        self._login(secret)

        self.success("You have been authenticated. You can now <a href=\"/\">proceed</a>.")
        self.redirect(2, "/")

    def _login(self, secret):
        salt = "%d" % int(time.time())
        # secret and salt are plain ASCII, so encoding is safe on both Python versions
        salted_hash = sha256((secret + salt).encode("utf-8")).hexdigest()
        cookie_value = salt + ":" + salted_hash
        self._set_cookie("pmatic_auth", cookie_value)

    def process(self):
        self.h2("Login")
        self.p("Welcome to the pmatic Manager. Please provide your manager "
               "password to log in.")
        self.write("<div class=\"login\">\n")
        self.begin_form()
        self.password("password")
        self.submit("Log in", "login")
        self.end_form()
        self.write("</div>\n")


class PageEditResident(HtmlPageHandler, utils.LogMixin):
    url = "edit_resident"

    def _get_mode(self):
        return "edit"

    def _get_resident(self):
        resident_id = self._vars.getvalue("resident_id")
        if resident_id is None:
            raise PMUserError("You need to provide a <tt>resident_id</tt>.")
        resident_id = int(resident_id)

        if not self._manager.residents.exists(resident_id):
            raise PMUserError("The resident you are trying to edit does not exist.")

        return self._manager.residents.get(resident_id)

    def _get_device_types(self):
        types = []
        for subclass in PersonalDevice.types():
            types.append((subclass.type_name, subclass.type_title))
        return types

    def _get_manager_device_class(self, cls):
        return globals()["Manager"+cls.__name__]

    def _set_submitted_vars(self, resident, submit):
        if self._vars.getvalue("submitted") == "1": # submitted for reload or saving!
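            # This method runs in two modes: with submit=False the form was only
            # re-rendered (e.g. after a device type select changed), so validation
            # problems are tolerated silently; with submit=True the user pressed
            # "Save" and validation problems are raised as PMUserError.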
resident.name = self._vars.getvalue("name")
            if submit and not resident.name:
                raise PMUserError("You have to provide a name.")

            resident.email = self._vars.getvalue("email")
            if submit and resident.email and \
               not re.match(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", resident.email):
                raise PMUserError("You have to provide either none or a valid email address.")

            resident.mobile = self._vars.getvalue("mobile")
            resident.pushover_token = self._vars.getvalue("pushover_token")

            num_devices = int(self._vars.getvalue("num_devices"))
            resident.clear_devices()
            has_error = False
            for device_id in range(num_devices):
                device_type = self._vars.getvalue("device_%d_type" % device_id)
                if device_type:
                    cls = PersonalDevice.get(device_type)
                    if not cls:
                        raise PMUserError("Invalid device type \"%s\" given." % device_type)

                    manager_cls = self._get_manager_device_class(cls)

                    device = cls()
                    try:
                        manager_cls.set_submitted_vars(self, device, "device_%d_" % device_id)
                    except PMUserError as e:
                        if submit:
                            self.error(e)
                            has_error = True
                    resident.add_device(device)

            if submit and has_error:
                raise PMUserError("An error occurred, please correct it.")

    def action(self):
        resident = self._get_resident()
        self._set_submitted_vars(resident, submit=True)

        if self._get_mode() == "new":
            self._manager.residents.add(resident)
        self._manager.residents.save()

        self.success("The resident has been saved. Opening the resident list now.")
        self.redirect(2, "/residents")

    def title(self):
        return "Edit resident"

    def process(self):
        self.h2(self.title())

        mode = self._get_mode()
        resident = self._get_resident()
        self._set_submitted_vars(resident, submit=False)

        self.begin_form()
        if mode == "edit":
            self.hidden("resident_id", str(resident.id))
        self.hidden("submitted", "1")
        self.write("<table>")
        self.write("<tr><th>Name</th><td>")
        self.input("name", resident.name)
        self.write("</td></tr>")
        self.write("<tr><th>Email</th><td>")
        self.input("email", resident.email)
        self.write("</td></tr>")
        self.write("<tr><th>Mobile Phone</th><td>")
        self.input("mobile", resident.mobile)
        self.write("</td></tr>")
        self.write("<tr><th>Pushover Token</th><td>")
        self.input("pushover_token", resident.pushover_token)
        self.write("</td></tr>")
        self.write("</table>")

        self.h3("Devices")
        self.p("To detect presence you have to associate at least one device with the "
               "resident. This device is then used to detect whether the resident is present.")
        self.write("<table>")
        self.write("<tr>")
        self.write("<th>Type</th>")
        self.write("<th>Parameters</th>")
        self.write("</tr>")
        self.hidden("num_devices", str(len(resident.devices)+1))
        for device_id, device in enumerate(resident.devices + [PersonalDevice()]):
            varprefix = "device_%d_" % device_id
            self.write("<tr>")
            self.write("<td>")
            self.select(varprefix+"type", self._get_device_types(), deflt=device.type_name,
                        onchange="this.form.submit()")
            self.write("</td>")
            self.write("<td>")
            manager_cls = self._get_manager_device_class(device.__class__)
            manager_cls.input_parameters(self, device, varprefix)
            self.write("</td>")
            self.write("</tr>")
        self.write("</table>")

        self.submit("Save", "save")
        self.end_form()


class PageAddResident(PageEditResident, PageHandler):
    url = "add_resident"

    def _get_mode(self):
        return "new"

    def _get_resident(self):
        return Resident(self._manager.residents)

    def title(self):
        return "Add resident"


class ManagerPersonalDevice(object):
    @staticmethod
    def input_parameters(page, device, varprefix):
        pass

    @staticmethod
    def set_submitted_vars(page, device, varprefix):
        pass

    @staticmethod
    def display(device):
        return ""


class 
ManagerPersonalDeviceFritzBoxHost(ManagerPersonalDevice):
    @staticmethod
    def input_parameters(page, device, varprefix):
        page.write("MAC address: ")
        page.input(varprefix+"mac", device.mac)

    @staticmethod
    def set_submitted_vars(page, device, varprefix):
        mac = page.vars.getvalue(varprefix+"mac")
        if mac is not None:
            device.mac = mac

    @staticmethod
    def display(device):
        txt = "%s (%s)" % (device.name, device.mac)
        if not Config.fritzbox_enabled:
            txt += "<br><i>You need to configure the " \
                   "<a href=\"/config\">fritz!Box connection</a> to make this work.</i>"
        return txt


class PageResidents(HtmlPageHandler, utils.LogMixin):
    url = "residents"

    def title(self):
        return "Residents & Presence detection"

    def action(self):
        self.ensure_password_is_set()
        action = self._vars.getvalue("action")
        if action == "delete":
            return self._handle_delete()

    def _handle_delete(self):
        resident_id = self._vars.getvalue("resident_id")
        if not resident_id:
            raise PMUserError("You need to provide a resident to delete.")
        resident_id = int(resident_id)

        if not self._manager.residents.exists(resident_id):
            raise PMUserError("This resident does not exist.")

        if not self.confirm("Do you really want to delete this resident?"):
            return False

        self._manager.residents.remove(resident_id)
        self._manager.residents.save()
        self.success("The resident has been deleted.")

    def _get_manager_device_class(self, cls):
        return globals()["Manager"+cls.__name__]

    def process(self):
        self.ensure_password_is_set()
        self.h2("Residents & Presence detection")
        self.p("This page lets you configure your residents and their presence detection. "
               "Once configured and detected, you can make your schedules execute "
               "specific scripts when a resident becomes present or leaves. Or you can "
               "make your scripts behave depending on presence. 
You can also reference your "
               "residents in your scripts, for example to read attributes like the email "
               "address.")

        self.button("user", "Add resident", "/add_resident")
        self.write("<br>")
        self.write("<br>")

        self.write("<table>")
        self.write("<tr><th>Actions</th><th>Name</th><th>Devices</th>"
                   "<th>Last update</th><th>Last change</th><th>Present</th>")
        self.write("</tr>")
        for resident in self._manager.residents.residents:
            self.write("<tr>")
            self.write("<td>")
            self.icon_button("edit", "/edit_resident?resident_id=%d" % resident.id,
                             "Edit this resident")
            self.icon_button("trash",
                             self.action_url("?action=delete&resident_id=%d" % resident.id),
                             "Delete this resident")
            self.write("</td>")
            self.write("<td>%s</td>" % self.escape(resident.name))
            self.write("<td>")
            for device in resident.devices:
                manager_cls = self._get_manager_device_class(device.__class__)
                self.write(manager_cls.display(device)+"<br>")
            self.write("</td>")

            last_updated = resident.last_updated
            if last_updated:
                last_updated = time.strftime("%Y-%m-%d %H:%M:%S",
                                             utils.localtime(last_updated, Config.timezone))
            else:
                last_updated = "<i>Got no information yet.</i>"
            self.write("<td>%s</td>" % last_updated)

            last_changed = resident.last_changed
            if last_changed:
                last_changed = time.strftime("%Y-%m-%d %H:%M:%S",
                                             utils.localtime(last_changed, Config.timezone))
            else:
                last_changed = "<i>Got no information yet.</i>"
            self.write("<td>%s</td>" % last_changed)

            self.write("<td>")
            if resident.present:
                self.icon("home", "Is currently at home.")
            else:
                self.icon("question", "Is currently away.")
            self.write("</td>")
            self.write("</tr>")
        self.write("</table>")


class PageConfiguration(HtmlPageHandler, utils.LogMixin):
    url = "config"

    def title(self):
        return "Configuration of pmatic Manager"

    def action(self):
        action = self._vars.getvalue("action")
        if action == "set_password":
            self._handle_set_password()
        elif action == "save_config":
            self._handle_save_config()

    def _handle_set_password(self):
        password = self._vars.getvalue("password")
        if not password:
            raise PMUserError("You need to provide a password and it must not be empty.")

        if len(password) < 6:
            raise PMUserError("The password must be at least 6 characters long.")

        filepath = os.path.join(Config.config_path, "manager.secret")
        # sha256() expects bytes, so encode the password on Python 3 before hashing.
        password_bytes = password if utils.is_py2() else password.encode("utf-8")
        open(filepath, "w").write(sha256(password_bytes).hexdigest()+"\n")

        self.success("The password has been set. You will be redirected to the "
                     "<a href=\"/\">login</a>.")
        self.redirect(2, "/")

    # FIXME: Validations!
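    # A hedged sketch (not called anywhere): how the "pmatic_auth" cookie set by
    # PageLogin._login() can be verified. The cookie value carries
    # "<salt>:<sha256(secret + salt)>", where secret is the hexdigest written to
    # manager.secret by _handle_set_password() above:
    #
    #   salt, salted_hash = cookie_value.split(":", 1)
    #   expected = sha256((secret + salt).encode("utf-8")).hexdigest()
    #   authenticated = (salted_hash == expected)
    #
    # The real check lives in the page handler base code; this comment only
    # documents the scheme next to the code that sets the password.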
def _handle_save_config(self): log_level_name = self._vars.getvalue("log_level") if not log_level_name: Config.log_level = None else: Config.log_level = log_level_name pmatic.logging(Config.log_level) self._save_ccu_config() self._save_fritzbox_config() self._save_pushover_config() event_history_length = self._vars.getvalue("event_history_length") try: event_history_length = int(event_history_length) except ValueError: raise PMUserError("Invalid event history length given.") if event_history_length < 1: raise PMUserError("The minimum event history length is 1.") Config.event_history_length = event_history_length timezone = self._vars.getvalue("timezone") if timezone not in self._available_timezones(): raise PMUserError("Invalid timezone") Config.timezone = timezone presence_update_interval = self._vars.getvalue("presence_update_interval") try: presence_update_interval = int(presence_update_interval) except ValueError: raise PMUserError("Invalid presence update interval given.") if presence_update_interval < 1: raise PMUserError("The minimum presence update interval is 1 second.") Config.presence_update_interval = presence_update_interval Config.save() self.success("The configuration has been updated.") def _save_fritzbox_config(self): fritzbox_config_changed = False fritzbox_enabled = self.is_checked("fritzbox_enabled") if fritzbox_enabled != Config.fritzbox_enabled: fritzbox_config_changed = True Config.fritzbox_enabled = fritzbox_enabled fritzbox_address = self._vars.getvalue("fritzbox_address") if not fritzbox_address: raise PMUserError("You need to configure the fritz!Box address to be able to " "communicate with your fritz!Box.") if fritzbox_address != Config.fritzbox_address: fritzbox_config_changed = True Config.fritzbox_address = fritzbox_address fritzbox_port = self._vars.getvalue("fritzbox_port") if not fritzbox_port: raise PMUserError("You need to configure the fritz!Box port to be able to " "communicate with your fritz!Box.") else: fritzbox_port = int(fritzbox_port) if fritzbox_port != Config.fritzbox_port: fritzbox_config_changed = True Config.fritzbox_port = fritzbox_port fritzbox_username = self._vars.getvalue("fritzbox_username").strip() if fritzbox_username != Config.fritzbox_username: fritzbox_config_changed = True Config.fritzbox_username = fritzbox_username fritzbox_password = self._vars.getvalue("fritzbox_password") if fritzbox_password != "": if fritzbox_password != Config.fritzbox_password: fritzbox_config_changed = True Config.fritzbox_password = fritzbox_password if fritzbox_config_changed: # makes the connection be reinitialized on next request # FIXME: Move this to a method. 
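            # PersonalDeviceFritzBoxHost keeps one class-level connection object
            # that is shared by all fritz!Box host devices. Dropping it below makes
            # the next presence update log in again with the new settings.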
PersonalDeviceFritzBoxHost.connection = None

    def _save_ccu_config(self):
        ccu_config_changed = False

        ccu_enabled = self.is_checked("ccu_enabled")
        if ccu_enabled != Config.ccu_enabled:
            ccu_config_changed = True
        Config.ccu_enabled = ccu_enabled

        ccu_address = self._vars.getvalue("ccu_address")
        if not ccu_address:
            ccu_address = None
        if ccu_address != Config.ccu_address:
            ccu_config_changed = True
        Config.ccu_address = ccu_address

        ccu_username = self._vars.getvalue("ccu_username").strip()
        ccu_password = self._vars.getvalue("ccu_password")

        if ccu_username and not ccu_password and Config.ccu_credentials:
            # not trying to change the password
            ccu_credentials = ccu_username, Config.ccu_credentials[1]
        elif not ccu_username:
            if Config.ccu_enabled and not utils.is_ccu():
                raise PMUserError("You need to configure the CCU credentials to be able to "
                                  "communicate with your CCU.")
            ccu_credentials = None
        else:
            ccu_credentials = ccu_username, ccu_password

        if ccu_credentials != Config.ccu_credentials:
            ccu_config_changed = True
        Config.ccu_credentials = ccu_credentials

        if ccu_config_changed:
            self.logger.info("Reinitializing CCU connection (config changed)")
            self._manager.init_ccu()

    def _save_pushover_config(self):
        pushover_api_token = self._vars.getvalue("pushover_api_token")
        if not pushover_api_token:
            Config.pushover_api_token = None
        else:
            Config.pushover_api_token = pushover_api_token

        pushover_user_token = self._vars.getvalue("pushover_user_token")
        if not pushover_user_token:
            Config.pushover_user_token = None
        else:
            Config.pushover_user_token = pushover_user_token

    def _available_timezones(self):
        return pytz.common_timezones

    def process(self):
        self.password_form()
        self.config_form()

    def password_form(self):
        self.h2("Set Manager Password")
        self.p("To make the pmatic manager fully functional, you need to "
               "configure a password for accessing the manager first. Functions "
               "like uploading files are only enabled after a password has been set.")
        self.write("<div class=\"password_form\">\n")
        self.begin_form()
        self.write("<table>")
        self.write("<tr><th>Password</th>")
        self.write("<td>")
        self.password("password")
        self.write("</td></tr>")
        self.write("</table>")
        self.submit("Set password", "set_password")
        self.end_form()
        self.write("</div>\n")

    def config_form(self):
        self.h2("Configuration")
        self.write("<div class=\"config_form\">\n")
        self.begin_form()
        self.write("<table>")
        self.write("<tr><th>Log level"
                   "<p>Log entries having the configured log level (or a more severe one) are logged to"
                   " the file <tt>%s</tt> by default.</p>"
                   "</th>" % Config.log_file)
        self.write("<td>")
        self.select("log_level", [ (l, l) for l in pmatic.log_level_names ], Config.log_level)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Timezone"
                   "<p>When displaying local times or calculating with local times, use this "
                   "time zone.</p>"
                   "</th>")
        self.write("<td>")
        self.select("timezone", [ (l, l) for l in self._available_timezones() ], Config.timezone)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Event log entries"
                   "<p>Number of event log entries to keep. Once the pmatic manager has received "
                   "more events from the CCU, the oldest ones will be dropped.</p>"
                   "</th>")
        self.write("<td>")
        self.input("event_history_length", str(Config.event_history_length))
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Presence update interval"
                   "<p>You can configure in which intervals the presence information of your "
                   "residents should be updated. 
This defaults to 60 seconds, but you can "
                   "adapt it to your needs.</p>"
                   "</th>")
        self.write("<td>")
        self.input("presence_update_interval", str(Config.presence_update_interval))
        self.write("</td>")
        self.write("</tr>")
        self.write("</table>")

        self.h3("Connect to remote CCU")
        self.p("You can start the pmatic Manager on a device other than the CCU. In this case you "
               "have to configure the address and credentials to log into the CCU. If you start "
               "the pmatic Manager on your CCU, you can leave these options empty.")
        self.write("<table>")
        self.write("<tr><th>Connect with CCU</th>")
        self.write("<td>")
        self.checkbox("ccu_enabled", Config.ccu_enabled)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Address</th>")
        self.write("<td>")
        self.input("ccu_address", Config.ccu_address)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Username</th>")
        self.write("<td>")
        self.input("ccu_username", Config.ccu_credentials[0] if Config.ccu_credentials else "")
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Password</th>")
        self.write("<td>")
        self.password("ccu_password")
        self.write("</td>")
        self.write("</tr>")
        self.write("</table>")

        self.h3("fritz!Box Connection")
        self.p("If you would like to connect with your fritz!Box to detect presence of your "
               "users, you may enable this connection. If necessary, configure "
               "the address and credentials of your fritz!Box here.")
        self.write("<table>")
        self.write("<tr><th>Connect with fritz!Box</th>")
        self.write("<td>")
        self.checkbox("fritzbox_enabled", Config.fritzbox_enabled)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Address</th>")
        self.write("<td>")
        self.input("fritzbox_address", Config.fritzbox_address)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>API Port</th>")
        self.write("<td>")
        self.input("fritzbox_port", Config.fritzbox_port)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Username</th>")
        self.write("<td>")
        self.input("fritzbox_username", Config.fritzbox_username)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>Password</th>")
        self.write("<td>")
        self.password("fritzbox_password")
        self.write("</td>")
        self.write("</tr>")
        self.write("</table>")

        self.h3("Pushover Notifications")
        self.p("If you would like to use Pushover notifications, you need to configure your "
               "credentials here in order to make them work.")
        self.write("<table>")
        self.write("<tr><th>API Token</th>")
        self.write("<td>")
        self.input("pushover_api_token", Config.pushover_api_token)
        self.write("</td>")
        self.write("</tr>")
        self.write("<tr><th>User/Group Token</th>")
        self.write("<td>")
        self.input("pushover_user_token", Config.pushover_user_token)
        self.write("</td>")
        self.write("</tr>")
        self.write("</table>")

        self.submit("Save configuration", "save_config")
        self.end_form()
        self.write("</div>\n")


class PageEventLog(HtmlPageHandler, utils.LogMixin):
    url = "event_log"

    def title(self):
        return "Events received from the CCU"

    def process(self):
        self.h2("Events received from the CCU")
        self.p("This page shows the last %d events received from the CCU. Your pmatic "
               "scripts can register on these events to be called whenever such an event "
               "is received." % Config.event_history_length)

        if not Config.ccu_enabled:
            self.info("The connection with the CCU is disabled. In this mode the manager "
                      "cannot receive any events from the CCU. 
To be able to receive " "events, you need to configure the CCU address, credentials and " "enable the CCU connection.") return if not self._manager.event_manager.initialized: self.info("The event processing has not been initialized yet. Please come back " "in one or two minutes.") return self.p("Received <i>%d</i> events in total since the pmatic manager has been started." % self._manager.event_history.num_events_total) self.write("<table>") self.write("<tr><th>Time</th><th>Device</th><th>Channel</th><th>Parameter</th>" "<th>Event-Type</th><th>Value</th>") self.write("</tr>") for event in reversed(self._manager.event_history.events): #"time" : updated_param.last_updated, #"time_changed" : updated_param.last_changed, #"param" : updated_param, #"value" : updated_param.value, #"formated_value" : "%s" % updated_param, param = event["param"] if event["time"] == event["time_changed"]: ty = "changed" else: ty = "updated" self.write("<tr>") self.write("<td>%s</td>" % time.strftime("%Y-%m-%d %H:%M:%S", utils.localtime(event["time"], Config.timezone))) self.write("<td>%s (%s)</td>" % (self.escape(param.channel.device.name), self.escape(param.channel.device.address))) self.write("<td>%s</td>" % self.escape(param.channel.name)) self.write("<td>%s</td>" % self.escape(param.name)) self.write("<td>%s</td>" % self.escape(ty)) self.write("<td>%s (Raw value: %s)</td>" % (self.escape(event["formated_value"]), self.escape(event["value"]))) self.write("</tr>") self.write("</table>") class PageSchedule(HtmlPageHandler, utils.LogMixin): url = "schedule" def title(self): return "Schedule your pmatic Scripts" def action(self): self.ensure_password_is_set() action = self._vars.getvalue("action") if action == "delete": return self._handle_delete() elif action == "start": return self._handle_start() def _handle_delete(self): schedule_id = self._vars.getvalue("schedule_id") if not schedule_id: raise PMUserError("You need to provide a schedule to delete.") schedule_id = int(schedule_id) if not self._manager.scheduler.exists(schedule_id): raise PMUserError("This schedule does not exist.") if not self.confirm("Do you really want to delete this schedule?"): return False schedule = self._manager.scheduler.get(schedule_id) schedule.remove() self.success("The schedule has been deleted.") def _handle_start(self): schedule_id = self._vars.getvalue("schedule_id") if not schedule_id: raise PMUserError("You need to provide a schedule to start.") schedule_id = int(schedule_id) if not self._manager.scheduler.exists(schedule_id): raise PMUserError("This schedule does not exist.") schedule = self._manager.scheduler.get(schedule_id) if not schedule.script_exists: raise PMUserError("The configured script does not exist.") schedule.execute() self._manager.scheduler.save_state() self.success("The schedule has been started.") def process(self): self.ensure_password_is_set() self.h2("Schedule your pmatic Scripts") self.p("This page shows you all currently existing script schedules. 
A schedule controls " "in which situations a script is being executed.") self.button("tasks", "Add schedule", "/add_schedule") self.write("<br>") self.write("<br>") self.write("<table>") self.write("<tr><th>Actions</th><th>Name</th><th>On/Off</th><th>Conditions</th>" "<th>Script</th><th>Last triggered</th><th>Currently running</th>") self.write("</tr>") for schedule in sorted(self._manager.scheduler.schedules, key=lambda s: s.name): self.write("<tr>") self.write("<td>") self.icon_button("edit", "/edit_schedule?schedule_id=%d" % schedule.id, "Edit this schedule") self.icon_button("trash", self.action_url("?action=delete&schedule_id=%d" % schedule.id), "Delete this schedule") if schedule.last_triggered: self.icon_button("file-text-o", "/schedule_result?schedule_id=%d" % schedule.id, "Show the last schedule run result") if not schedule.is_running: self.icon_button("bolt", self.action_url("?action=start&schedule_id=%d" % schedule.id), "Manually trigger this schedule now") self.write("</td>") self.write("<td>%s</td>" % self.escape(schedule.name)) self.write("<td>") if schedule.disabled: self.icon("close", "The schedule is currently disabled.") else: self.icon("check", "The schedule is currently enabled.") self.write("</td>") self.write("<td>") for condition in sorted(schedule.conditions.values(), key=lambda c: c.id): self.write(condition.display()+"<br>") self.write("</td>") self.write("<td>%s</td>" % self.escape(schedule.script)) last_triggered = schedule.last_triggered if last_triggered: last_triggered = time.strftime("%Y-%m-%d %H:%M:%S", utils.localtime(last_triggered, Config.timezone)) else: last_triggered = "<i>Not triggered yet.</i>" self.write("<td>%s</td>" % last_triggered) self.write("<td>%s</td>" % ("running" if schedule.is_running else "not running")) self.write("</tr>") self.write("</table>") class PageEditSchedule(HtmlPageHandler, utils.LogMixin): url = "edit_schedule" def __init__(self, *args): self._schedule = None super(PageEditSchedule, self).__init__(*args) def _get_mode(self): return "edit" def _get_schedule(self): schedule_id = self._vars.getvalue("schedule_id") if schedule_id is None: raise PMUserError("You need to provide a <tt>schedule_id</tt>.") schedule_id = int(schedule_id) if not self._manager.scheduler.exists(schedule_id): raise PMUserError("The schedule you are trying to edit does not exist.") return self._manager.scheduler.get(schedule_id) def _get_schedule_copy(self): orig_schedule = self._get_schedule() schedule = Schedule(self._manager) schedule.from_config(orig_schedule.to_config()) schedule.from_state(orig_schedule.to_state()) return schedule def _get_condition_types(self): types = [] for subclass in Condition.types(): types.append((subclass.type_name, subclass.type_title)) return types def _set_submitted_vars(self, schedule, submit): if self._vars.getvalue("submitted") == "1": # submitted for reload or saving! 
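            # submit=False means the form was only re-rendered (e.g. a condition
            # type select changed); the conditions are rebuilt from the submitted
            # variables below, and known condition ids are looked up in
            # old_conditions so that their runtime state survives the edit.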
schedule.name = self._vars.getvalue("name")
            if submit and not schedule.name:
                raise PMUserError("You have to provide a name.")

            schedule.keep_running = self.is_checked("keep_running")
            schedule.run_inline = self.is_checked("run_inline")
            schedule.disabled = self.is_checked("disabled")

            script = self._vars.getvalue("script")
            if script and script not in self._manager.get_scripts():
                raise PMUserError("The given script does not exist.")
            if submit and not script:
                raise PMUserError("You have to select a script.")
            schedule.script = script

            old_conditions = schedule.conditions.copy()
            schedule.clear_conditions()
            num_conditions = int(self._vars.getvalue("num_conditions"))
            has_error = False
            for num in range(num_conditions):
                condition_id = int(self._vars.getvalue("cond_%d_id" % num))
                condition_type = self._vars.getvalue("cond_%d_type" % num)
                if not condition_type:
                    continue

                condition = self._new_condition(condition_type)
                if condition_id != -1:
                    condition.id = condition_id
                    if condition.id in old_conditions:
                        condition.from_state(old_conditions[condition.id].to_state())
                schedule.add_condition(condition)
                try:
                    condition.set_submitted_vars(self, "cond_%d_" % num)
                except PMUserError as e:
                    if submit:
                        self.error("Condition #%d: %s" % (num+1, e))
                        has_error = True

            if submit and has_error:
                raise PMUserError("An error occurred, please correct it.")

    def _new_condition(self, condition_type):
        cls = Condition.get(condition_type)
        if not cls:
            raise PMUserError("Invalid condition type \"%s\" given." % condition_type)
        return cls(self._manager)

    def action(self):
        self.ensure_password_is_set()

        # Copy the object for editing so that possible validation issues don't affect
        # already existing schedules in the meantime. Only copy back when no validation
        # issues occur.
        orig_schedule = self._get_schedule()
        self._schedule = self._get_schedule_copy()

        self._set_submitted_vars(self._schedule, submit=True)

        orig_schedule.from_config(self._schedule.to_config())
        orig_schedule.from_state(self._schedule.to_state())
        orig_schedule.save()
        self._schedule = orig_schedule

        self.success("The schedule has been saved. Opening the schedule list now.")
        self.redirect(2, "/schedule")

    def title(self):
        return "Edit Script Schedule"

    def process(self):
        self.ensure_password_is_set()
        self.h2(self.title())

        mode = self._get_mode()

        if not self._schedule:
            self._schedule = self._get_schedule_copy()

        # on page update e.g. when a select field was changed
        self._set_submitted_vars(self._schedule, submit=False)

        schedule = self._schedule

        self.begin_form()
        if mode == "edit":
            self.hidden("schedule_id", str(schedule.id))
        self.hidden("submitted", "1")
        self.write("<table>")
        self.write("<tr><th>Name</th><td>")
        self.input("name", schedule.name)
        self.write("</td></tr>")
        self.write("<tr><th>Keep running"
                   "<p>Keep the script running and restart it automatically after it has been "
                   "started once. <i>Note:</i> If the script is respawning too often, its "
                   "restarts will be delayed.</p></th><td>")
        self.checkbox("keep_running", schedule.keep_running)
        self.write("</td></tr>")
        self.write("<tr><th>Disabled"
                   "<p>You can use this option to disable future executions of this "
                   "schedule until you re-enable it.</p></th><td>")
        self.checkbox("disabled", schedule.disabled)
        self.write("</td></tr>")
        self.write("<tr><th>Run inline"
                   "<p>Execute the script inline within the manager process, with access to the "
                   "manager's CCU object. Use this if your scripts need access to CCU provided "
                   "information like devices, channels or values. 
If you uncheck this, your script will be "
                   "started as a separate process.<br>You can use your regular, unmodified pmatic "
                   "scripts with this. If you create a CCU() object in your code, it will not "
                   "create a new object but use the pmatic manager's CCU object, which is "
                   "already initialized.</p></th><td>")
        self.checkbox("run_inline", schedule.run_inline)
        self.write("</td></tr>")
        scripts = list(self._manager.get_scripts())
        if schedule.script != "" and schedule.script not in scripts:
            scripts.append(schedule.script)
        self.write("<tr><th>Script to execute</th><td>")
        self.select("script", sorted([ (s, s) for s in scripts ]), schedule.script)
        self.write("</td></tr>")
        self.write("</table>")

        self.h3("Conditions")
        self.p("Here you need to specify at least one condition for the script to be started. "
               "If you create multiple conditions, each condition triggers the script on "
               "its own.")
        self.write("<table>")
        self.write("<tr>")
        self.write("<th class=\"short\">#</th>")
        self.write("<th>Type</th>")
        self.write("<th>Parameters</th>")
        self.write("</tr>")
        self.hidden("num_conditions", str(len(schedule.conditions)+1))
        choices = sorted(schedule.conditions.values(), key=lambda c: c.id) \
                  + [Condition(self._manager)]
        for num, condition in enumerate(choices):
            varprefix = "cond_%d_" % num
            self.hidden(varprefix+"id", "%d" % (condition.id if condition.id is not None else -1))
            self.write("<tr>")
            self.write("<td>#%d</td>" % (num+1))
            self.write("<td>")
            self.write("Execute script ")
            self.select(varprefix+"type", self._get_condition_types(), deflt=condition.type_name,
                        onchange="this.form.submit()")
            self.write("</td>")
            self.write("<td>")
            condition.input_parameters(self, varprefix)
            self.write("</td>")
            self.write("</tr>")
        self.write("</table>")

        self.submit("Save", "save")
        self.end_form()


class PageAddSchedule(PageEditSchedule, PageHandler):
    url = "add_schedule"

    def _get_mode(self):
        return "new"

    def _get_schedule(self):
        return Schedule(self._manager)

    def title(self):
        return "Add Script Schedule"


class PageScheduleResult(PageHandler, AbstractScriptProgressPage, utils.LogMixin):
    url = "schedule_result"

    def title(self):
        return "Scheduled Script Result"

    def _get_schedule(self):
        schedule_id = self._vars.getvalue("schedule_id")
        if schedule_id is None:
            raise PMUserError("You need to provide a <tt>schedule_id</tt>.")
        schedule_id = int(schedule_id)

        if not self._manager.scheduler.exists(schedule_id):
            raise PMUserError("The schedule you are trying to view does not exist.")

        return self._manager.scheduler.get(schedule_id)

    def _abort_url(self):
        schedule_id = int(self._vars.getvalue("schedule_id"))
        return self.action_url("/schedule_result?schedule_id=%d&action=abort" % schedule_id)

    def action(self):
        self.ensure_password_is_set()
        action = self._vars.getvalue("action")
        if action == "abort":
            schedule = self._get_schedule()
            self._set_runner(schedule.runner)
            self._handle_abort()

    def process(self):
        self.ensure_password_is_set()
        schedule = self._get_schedule()
        self._set_runner(schedule.runner)
        self._progress()


class PageState(HtmlPageHandler, utils.LogMixin):
    url = "state"

    def title(self):
        return "pmatic Manager State"

    def process(self):
        self.h2(self.title())
        self.p("This page shows you some details about the overall state of the pmatic Manager.")

        self.h3("General")
        self.write("<table class=\"info\">")
        vmsize, vmrss = self._current_memory_usage()
        self.write("<tr><th>Memory Usage (Virtual)</th>")
        self.write("<td>%0.2f MB</td></tr>" % (vmsize/1024.0/1024.0))
        self.write("<tr><th>Memory Usage (Resident)</th>")
        self.write("<td>%0.2f MB</td></tr>"
% (vmrss/1024.0/1024.0))
        self.write("</table>")

        self.h3("CCU Connection")
        self.write("<table class=\"info\">")
        self.write("<tr><th>Current State</th>")
        if not Config.ccu_enabled:
            cls = "state1"
            text = "Disabled (by command line or configuration)"
        elif self._manager.ccu.api.initialized:
            cls = "state0"
            text = "Initialized"
        else:
            cls = "state2"
            text = "Not initialized (%s)" % (self._manager.ccu.api.fail_reason or "No error")
        self.write("<td class=\"%s\">%s" % (cls, text))
        self.write("</td></tr>")

        if self._manager.ccu.api.initialized:
            devices = self._manager.ccu.devices
        else:
            devices = []

        self.write("<tr><th>Number of Devices</th>")
        self.write("<td>%s</td></tr>" % len(devices))

        num_channels = sum([ len(device.channels) for device in devices ])
        self.write("<tr><th>Number of Channels</th>")
        self.write("<td>%s</td></tr>" % num_channels)
        self.write("</table>")

        self.h3("CCU Event Processing")
        self.write("<table class=\"info\">")
        self.write("<tr><th>Current State</th>")
        if not Config.ccu_enabled:
            cls = "state0"
            text = "Disabled (because CCU connection is disabled)"
        elif self._manager.event_manager.initialized:
            cls = "state0"
            text = "Initialized"
        else:
            cls = "state2"
            text = "Not initialized (%s)" % (self._manager.event_manager.fail_reason or "No error")
        self.write("<td class=\"%s\">%s" % (cls, text))
        self.write("</td></tr>")

        self.write("<tr><th>Number of Events</th>")
        self.write("<td>%s</td></tr>" % self._manager.event_history.num_events_total)

        self.write("<tr><th>Time of Last Event</th>")
        self.write("<td>%s</td></tr>" % time.strftime("%Y-%m-%d %H:%M:%S",
                    utils.localtime(self._manager.event_history.last_event_time, Config.timezone)))
        self.write("</table>")

        # FIXME: Care about too large logfiles
        #self.h2("pmatic Manager Logfile")
        #self.write("<pre id=\"logfile\">")
        #for line in open(Config.log_file):
        #    self.write_text(line.decode("utf-8"))
        #self.write("</pre>")
        #self.js("document.getElementById(\"logfile\").scrollTop = "
        #        "document.getElementById(\"logfile\").scrollHeight;")

    def _current_memory_usage(self):
        """Returns the current virtual (VmSize) and resident (VmRSS) memory usage in bytes"""
        vmsize, vmrss = 0, 0
        for line in open('/proc/self/status'):
            if line.startswith("VmSize:"):
                vmsize = int(line.split()[1])*1024
            elif line.startswith("VmRSS"):
                vmrss = int(line.split()[1])*1024
        return vmsize, vmrss


class Page404(HtmlPageHandler, utils.LogMixin):
    url = "404"

    def _send_http_header(self):
        self._start_response(self._http_status(404), self._http_headers)

    def title(self):
        return "404 - Page not Found"

    def process(self):
        self.p("The requested page could not be found.")


@contextlib.contextmanager
def catch_stdout_and_stderr(out=None):
    old_out, old_err = sys.stdout, sys.stderr
    if out is None:
        out = StringIO()
    sys.stdout = out
    sys.stderr = out
    try:
        yield out
    finally:
        # Restore the original streams even when the executed code raises.
        sys.stdout, sys.stderr = old_out, old_err


class ScriptRunner(threading.Thread, utils.LogMixin):
    def __init__(self, manager, script, run_inline=False, keep_running=False):
        threading.Thread.__init__(self)
        self.daemon = True

        self._manager = manager
        self.script = script
        self.run_inline = run_inline
        self.keep_running = keep_running
        self.restarted = None

        self.output = StringIO()
        self.exit_code = None
        self.started = time.time()
        self.finished = None
        self._is_running = False
        self._p = None

    def run(self):
        while True:
            try:
                self.logger.info("Starting script (%s): %s",
                                 "inline" if self.run_inline else "external", self.script)
                script_path = os.path.join(Config.script_path, self.script)
                self._is_running = True
                if self.run_inline:
                    exit_code = self._run_inline(script_path)
                else:
                    exit_code = 
self._run_external(script_path) self.exit_code = exit_code self.logger.info("Finished (Exit-Code: %d).", self.exit_code) except Exception: self.logger.error("Failed to execute %s", self.script, exc_info=True) self.logger.debug(traceback.format_exc()) finally: self._is_running = False self.finished = time.time() # Either execute the script once or handle the keep_running option. # when the script is restarting too fast, delay it for some time. if not self.keep_running: break elif self.restarted != None and time.time() - self.restarted < 5: delay = 30 - (time.time() - self.restarted) self.logger.info("Last restart is less than 5 seconds ago, delaying restart for " "%d seconds (\"Keep running\" is enabled)." % delay) time.sleep(delay) self.restarted = time.time() else: self.logger.info("Restarting the script (\"Keep running\" is enabled)") self.restarted = time.time() def _run_external(self, script_path): env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" self._p = subprocess.Popen(["/usr/bin/env", "python", "-u", script_path], shell=False, cwd="/", env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) while True: nextline = self._p.stdout.readline().decode("utf-8") if nextline == "" and self._p.poll() is not None: break self.output.write(nextline) exit_code = self._p.poll() return exit_code def _run_inline(self, script_path): exit_code = 0 try: # Make the ccu object available globally so that the __new__ method # of the CCU class can use and return this instead of creating a new # CCU object within the pmatic scripts. if self._manager.ccu: builtins.manager_ccu = self._manager.ccu # Catch stdout and stderr of the executed python script and write # it to the same StringIO() object. with catch_stdout_and_stderr(self.output): script_globals = {} # would use execfile() but it's not available in Python 3.x exec(compile(open(script_path, "rb").read(), script_path, 'exec'), script_globals) except SystemExit as e: exit_code = e.code except Exception as e: self.logger.error("Exception in inline script %s", script_path, exc_info=True) self.output.write("%s" % e) self.output.write("%s" % traceback.format_exc()) exit_code = 1 return exit_code @property def is_running(self): return self.is_alive() and self._is_running @property def abortable(self): return not self.run_inline def abort(self): if self.abortable: self._abort_external() # FIXME: Set self.exit_code, self.output and self.finished()? def _abort_external(self): if not self._p: return self._p.terminate() # And wait for the termination (at least shortly) timer = 10 while timer > 0 and self._p.poll() is None: timer -= 1 time.sleep(0.1) class PMServerHandler(wsgiref.simple_server.ServerHandler, utils.LogMixin): server_software = 'pmatic-manager' # Hook into ServerHandler to be able to catch exceptions about disconnected clients def finish_response(self): try: super(PMServerHandler, self).finish_response() except socket.error as e: # Client disconnected while answering it's request. if e.errno != 32: raise def send_preamble(self): try: super(PMServerHandler, self).send_preamble() except socket.error as e: # Client disconnected while answering it's request. if e.errno != 32: raise def log_exception(self, exc_info): self.logger.error("Unhandled exception", exc_info=True) # Found no elegant way to patch it. Sorry. 
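# The two assignments below monkey-patch wsgiref: the original handler class is
# kept in _ServerHandler and replaced by PMServerHandler, so every request is
# served through the subclass that swallows broken-pipe (errno 32) errors from
# disconnected clients (see finish_response()/send_preamble() above).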
wsgiref.simple_server._ServerHandler = wsgiref.simple_server.ServerHandler wsgiref.simple_server.ServerHandler = PMServerHandler class Manager(wsgiref.simple_server.WSGIServer, utils.LogMixin): def __init__(self, address): wsgiref.simple_server.WSGIServer.__init__( self, address, RequestHandler) self.set_app(self._request_handler) self.ccu = None self.event_manager = EventManager(self) self.event_history = EventHistory() self.scheduler = Scheduler(self) self.residents = ManagerResidents(self) def init_scheduler(self): self.scheduler.load() self.scheduler.start() # FIXME: When running the manager from remote: # - Handle pmatic.exceptions.PMConnectionError correctly # The connection should be retried later and all depending # code needs to be able to deal with an unconnected manager. # - Handle pmatic.exceptions.PMException: # [session_login] JSONRPCError: too many sessions (501) def init_ccu(self): """This method initializes the manager global CCU object. It is called during startup of the manager, but also when the CCU related configuration changed to apply the changes. """ if self.ccu: self.ccu.close() self.ccu = None if not Config.ccu_enabled: self.logger.info("Connection with CCU is disabled") return self.logger.info("Initializing connection with CCU...") try: self.ccu = pmatic.CCU(address=Config.ccu_address, credentials=Config.ccu_credentials) except PMException as e: self.logger.error("Failed to initialize CCU connection: %s", e) return self._patch_manager_residents() self._register_for_ccu_events() # Update the CCU dependent conditions self.scheduler.update_conditions() @property def ccu_initialized(self): """Whether or not a connection with the CCU has been initialized.""" return self.ccu != None and self.ccu.api.initialized def _patch_manager_residents(self): """Patches the manager specific subclass of :class:`pmatic.residents.Residents` into the CCU object. This prevents loading the default `Residents` class.""" self.ccu._residents = self.residents def _register_for_ccu_events(self): if self.event_manager.is_alive(): self.event_manager.stop() self.event_manager = None if not Config.ccu_enabled: return self.event_manager = EventManager(self) self.logger.info("Registering for CCU events...") self.event_manager.start() def _request_handler(self, environ, start_response): # handler_class may be any subclass of PageHandler handler_class = PageHandler.get(environ) page = handler_class(self, environ, start_response) return page.process_page() def process_request(self, request, client_address): try: super(Manager, self).process_request(request, client_address) except socket.error as e: if e.errno == 32: self.logger.debug("%s: Client disconnected while answering it's request.", client_address, exc_info=True) else: raise def daemonize(self, user=0, group=0): # do the UNIX double-fork magic, see Stevens' "Advanced # Programming in the UNIX Environment" for details (ISBN 0201563177) try: pid = os.fork() if pid > 0: # exit first parent sys.exit(0) except OSError as e: sys.stderr.write("Fork failed (#1): %d (%s)\n" % (e.errno, e.strerror)) sys.exit(1) # decouple from parent environment # chdir -> don't prevent unmounting... 
os.chdir("/") # Create new process group with the process as leader os.setsid() # Set user/group depending on params if group: os.setregid(getgrnam(group)[2], getgrnam(group)[2]) if user: os.setreuid(getpwnam(user)[2], getpwnam(user)[2]) # do second fork try: pid = os.fork() if pid > 0: sys.exit(0) except OSError as e: sys.stderr.write("Fork failed (#2): %d (%s)\n" % (e.errno, e.strerror)) sys.exit(1) sys.stdout.flush() sys.stderr.flush() si = os.open("/dev/null", os.O_RDONLY) so = os.open("/dev/null", os.O_WRONLY) os.dup2(si, 0) os.dup2(so, 1) os.dup2(so, 2) os.close(si) os.close(so) self.logger.debug("Daemonized with PID %d.", os.getpid()) def register_signal_handlers(self): signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGQUIT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def signal_handler(self, signum, _unused_stack_frame): raise SignalReceived(signum) def get_scripts(self): if not os.path.exists(Config.script_path): raise PMUserError("The script directory %s does not exist." % Config.script_path) for dirpath, _unused_dirnames, filenames in os.walk(Config.script_path): if dirpath == Config.script_path: relpath = "" else: relpath = dirpath[len(Config.script_path)+1:] for filename in filenames: filepath = os.path.join(dirpath, filename) if os.path.isfile(filepath) and filename[0] != ".": if relpath: yield os.path.join(relpath, filename) else: yield filename class RequestHandler(wsgiref.simple_server.WSGIRequestHandler, utils.LogMixin): def log_message(self, fmt, *args): self.logger.debug("%s %s", self.client_address[0], fmt%args) class EventManager(threading.Thread, utils.LogMixin): """Manages the CCU event handling for the the Manager().""" def __init__(self, manager): threading.Thread.__init__(self) self._manager = manager self.daemon = True self._is_initialized = False self._fail_exc = None self._terminate = threading.Event() def run(self): while not self._terminate.is_set(): if not self.initialized: self._fail_exc = None try: self._do_register_for_ccu_events() except Exception as e: self._fail_exc = e self.logger.error("Error in EventManager (%s). Restarting in 10 seconds.", e) self.logger.debug("Exception:", exc_info=True) time.sleep(10) else: self._terminate.wait() def _do_register_for_ccu_events(self): self._manager.ccu.events.init() self.logger.debug("events initialized") self._manager.ccu.events.on_value_updated(self._on_value_updated) self.logger.info("Event processing initialized.") self._is_initialized = True def _on_value_updated(self, event_listener, updated_param): # It seem to happen that we receive events for non readable values. e.g. # Exception in XML-RPC call event('pmatic-0', 'LEQ1237196:1', 'INSTALL_TEST', True): # ... # PMException: Exception in callback (value_updated - # <bound method EventManager._on_value_updated of # <EventManager(Thread-3, started daemon -1242856336)>>): The value can not be read. 
# Ignore non readable value updates here if not updated_param.readable: return self._manager.event_history.add_event({ "time" : updated_param.last_updated, "time_changed" : updated_param.last_changed, "param" : updated_param, "value" : updated_param.value, "formated_value" : "%s" % updated_param, }) self._manager.scheduler.queue_device_event(updated_param, updated_param.last_updated, updated_param.last_changed, updated_param.value) @property def initialized(self): return self._is_initialized @property def fail_reason(self): return self._fail_exc def stop(self): self._terminate.set() self.join() class EventHistory(object): def __init__(self): self._events = [] self._num_events_total = 0 self._last_event_time = None def add_event(self, event_dict): self._last_event_time = time.time() self._num_events_total += 1 self._events.append(event_dict) if len(self._events) > Config.event_history_length: self._events.pop(0) @property def events(self): return self._events @property def num_events_total(self): return self._num_events_total @property def last_event_time(self): return self._last_event_time class ManagerResidents(Residents, utils.PersistentConfigMixin, utils.PersistentStateMixin, utils.LogMixin): _name = "residents" def __init__(self, manager): super(ManagerResidents, self).__init__() self._manager = manager self.load() def load(self): self.load_config(default={}) self.load_state(default=[]) def save(self): self.save_state() self.save_config() @property def config_file(self): return os.path.join(Config.config_path, "manager.residents") @property def state_file(self): return os.path.join(Config.state_path, "manager.residents") def update(self): super(ManagerResidents, self).update() self.save_state() def _add(self, r): r.on_presence_changed(self._manager.scheduler.handle_presence_changed) super(ManagerResidents, self)._add(r) class Scheduler(threading.Thread, utils.LogMixin, utils.PersistentConfigMixin, utils.PersistentStateMixin): _name = "schedules" def __init__(self, manager): threading.Thread.__init__(self) self.daemon = True self._manager = manager self._schedules = {} self._device_event_queue = queue.Queue() self._on_startup_executed = False self._on_ccu_init_executed = False self._next_presence_update = None utils.PersistentConfigMixin.__init__(self) def run(self): self.logger.info("Starting Scheduler..") while True: try: if not self._on_startup_executed: # Run on startup scripts for schedule in self._schedules_with_condition_type(ConditionOnStartup): self.execute(schedule) self._on_startup_executed = True if not self._on_ccu_init_executed and self._manager.event_manager.initialized: # Run on ccu init scripts for schedule in self._schedules_with_condition_type(ConditionOnCCUInitialized): self.execute(schedule) self._on_ccu_init_executed = True to_execute = set([]) to_execute.update(self._check_timed_schedules()) to_execute.update(self._check_device_event_schedules()) self._execute_matched_schedules(to_execute) self._execute_presence_update() except Exception: self.logger.error("Exception in Scheduler", exc_info=True) # FIXME: Optimization: Don't wake up every second. Sleep till next scheduled event. time.sleep(1) self.logger.info("Stopped Scheduler") def _execute_presence_update(self): """Updates the presence information of residents in the configured interval. 
When no resident is configured, this method does nothing."""
        if not self._manager.residents.enabled:
            self.logger.debug("Not updating presence information (not enabled)")
            return

        if self._next_presence_update is None or self._next_presence_update < time.time():
            self.logger.debug("Updating presence information")
            self._manager.residents.update()
            self._next_presence_update = time.time() + Config.presence_update_interval
            self.save_state()

    def _check_timed_schedules(self):
        """Checks all configured timed schedules whether or not the next occurrence
        has been reached. Then, if reached, the schedule is added to a list and the
        next occurrence is calculated. The list of all matched schedules is then
        returned.
        """
        to_execute = set([])
        # FIXME: Optimize schedule/condition handling
        for schedule in self.enabled_schedules:
            for condition in schedule.conditions.values():
                if isinstance(condition, ConditionOnTime):
                    if condition.next_time <= time.time():
                        this_time = condition.next_time
                        condition.calculate_next_time()
                        to_execute.add(schedule)
                        self.logger.debug("Timed condition matched: %d. Next will be: %d.",
                                          this_time, condition.next_time)
                    #else:
                    #    self.logger.debug("Timed condition is not due yet (%d <= %d)",
                    #                      condition.next_time, time.time())
        return to_execute

    def _check_device_event_schedules(self):
        """Processes all received device events from the queue and checks whether or
        not a schedule has to be executed based on them."""
        to_execute = set([])
        schedules = self._schedules_with_device_conditions()
        while True:
            try:
                device_event = self._device_event_queue.get_nowait()
            except queue.Empty:
                break # finished

            for schedule in schedules:
                matched = False
                for condition in schedule.conditions.values():
                    if isinstance(condition, ConditionOnDeviceEvent) \
                       or isinstance(condition, ConditionOnDevicesOfTypeEvent):
                        if condition.matches_device_event(device_event):
                            #self.logger.info("Device condition matched: %r" % (device_event, ))
                            matched = True
                            break
                        #else:
                        #    self.logger.debug("Condition not matched: %r" % (device_event, ))
                if matched:
                    #self.logger.info("added to execute: %r" % schedule)
                    to_execute.add(schedule)

        return to_execute

    def _schedules_with_device_conditions(self):
        schedules = set([])
        schedules.update(self._schedules_with_condition_type(ConditionOnDeviceEvent))
        schedules.update(self._schedules_with_condition_type(ConditionOnDevicesOfTypeEvent))
        return schedules

    def _execute_matched_schedules(self, to_execute):
        for schedule in to_execute:
            self.execute(schedule)

        # Save the state because the next execution time has been updated when at
        # least one timed condition matched.
        if to_execute:
            self.save_state()

    # FIXME: Optimize schedule/condition handling
    def _schedules_with_condition_type(self, cls):
        for schedule in self.enabled_schedules:
            matched = False
            for condition in schedule.conditions.values():
                if isinstance(condition, cls):
                    matched = True
                    break
            if matched:
                yield schedule

    # FIXME: Optimize schedule/condition handling
    def handle_presence_changed(self, resident):
        """Checks all configured resident presence schedules whether they match the
        presence event that just occurred. If so, the schedule is executed.
""" for schedule in self.enabled_schedules: for condition in schedule.conditions.values(): if isinstance(condition, ConditionOnResidentPresence): if condition.resident == resident: event_type = condition.event_type if event_type == "change" \ or (resident.present and event_type == "arrival") \ or (not resident.present and event_type == "departure"): self.logger.debug("Presence condition matched: %s/%s" % (resident.name, event_type)) self.execute(schedule) def execute(self, schedule): """Executes a script schedule. This is normally issued by the Scheduler itself when it detected that a condition of a schedule matched. Each of the executed scripts are started in a separate ScriptRunner object which is managing the executed script, collecting it's output and restarts the script when it it is configured to be kept running and terminates. The script runner is connected with the *schedule* object so that the Scheduler knows that the schedule is currently being executed and should not be started a second time in parallel. """ self.logger.info("[%s] Executing script..." % schedule.name) if schedule.is_running: self.logger.info("[%s] Conditions matched, but script was already running.", schedule.name) return if not schedule.script_exists: self.logger.info("[%s] Conditions matched, but script does not exist.", schedule.name) return schedule.execute() # TODO: This could be optimize easily to filter only for relevant updates. # This means only process events for devices that schedules are using. def queue_device_event(self, updated_param, time, time_last_changed, value): """Is used to hand over device events to the script scheduler which is then processing the events in its own thread.""" self._device_event_queue.put_nowait((updated_param, time, time_last_changed, value)) def load(self): self.load_config(default=[]) self.load_state() def save(self): self.save_state() self.save_config() @property def config_file(self): return os.path.join(Config.config_path, "manager.schedules") @property def state_file(self): return os.path.join(Config.state_path, "manager.schedules") @property def enabled_schedules(self): """Return all non disabled schedules.""" for schedule in self._schedules.values(): if not schedule.disabled: yield schedule @property def schedules(self): return self._schedules.values() def exists(self, schedule_id): return schedule_id in self._schedules def get(self, schedule_id): return self._schedules[schedule_id] def clear(self): self._schedules.clear() def add(self, schedule): if schedule.id is None: schedule.id = self._next_id() self._schedules[schedule.id] = schedule def _next_id(self): return max([-1] + list(self._schedules.keys())) + 1 def remove(self, schedule_id): """Removes the schedule with the given *schedule_id* from the Scheduler. Tolerates non existing schedule ids. 
When a schedule is currently running, it is being terminated (if possible).""" try: schedule = self._schedules.pop(schedule_id) if schedule.is_running: schedule.runner.abort() except KeyError: pass def from_config(self, schedule_config): for schedule_cfg in schedule_config: schedule = Schedule(self._manager) schedule.from_config(schedule_cfg) self.add(schedule) def to_config(self): schedule_config = [] for schedule in self._schedules.values(): schedule_config.append(schedule.to_config()) return schedule_config def from_state(self, state): if state is None: return # on default, do nothing self._next_presence_update = state["next_presence_update"] for schedule, schedule_state in zip(self._schedules.values(), state["schedules"]): schedule.from_state(schedule_state) def to_state(self): return { "next_presence_update" : self._next_presence_update, "schedules" : [ s.to_state() for s in self._schedules.values() ], } def update_conditions(self): for schedule in self._schedules.values(): schedule.update_conditions() class Schedule(object): def __init__(self, manager): self._manager = manager self.id = None self.name = "" self.disabled = False self.keep_running = False self.run_inline = True self.script = "" self.conditions = {} self.last_triggered = None self._runner = None @property def is_running(self): return self._runner and self._runner.is_running @property def script_exists(self): return self.script in self._manager.get_scripts() def execute(self): if not self.script_exists: raise PMException("Not executing since the script does not exist.") self.last_triggered = time.time() # FIXME: Recycle old runner? self._runner = ScriptRunner(self._manager, self.script, self.run_inline, self.keep_running) self._runner.start() @property def runner(self): return self._runner def get_condition(self, condition_id): """Returns the condition with the given id. Raises a ``KeyError`` when the condition not exists.""" return self.conditions[condition_id] def remove_condition(self, condition_id): """Removes the condition with the given id. Tolerates not existing conditions. Always returns ``None``.""" try: del self.conditions[condition_id] except KeyError: pass def add_condition(self, condition): if condition.id == None: condition.id = self._next_condition_id() self.conditions[condition.id] = condition def clear_conditions(self): self.conditions.clear() def _next_condition_id(self): return max([-1] + list(self.conditions.keys())) + 1 def update_conditions(self): for condition in self.conditions.values(): condition.from_config(condition.to_config()) def from_config(self, cfg): self.clear_conditions() for key, val in cfg.items(): if key != "conditions": setattr(self, key, val) else: for condition_cfg in val: cls = Condition.get(condition_cfg["type_name"]) if not cls: raise PMUserError("Failed to load condition type: %s" % condition_cfg["type_name"]) condition = cls(self._manager) condition.from_config(condition_cfg) self.add_condition(condition) def to_config(self): return { "id" : self.id, "name" : self.name, "disabled" : self.disabled, "keep_running" : self.keep_running, "run_inline" : self.run_inline, "script" : self.script, "conditions" : [ c.to_config() for c in self.conditions.values() ], } def from_state(self, state): for key, val in state.items(): if key not in [ "conditions", "id" ]: setattr(self, key, val) sorted_ids = sorted(self.conditions.keys()) for num, condition_state in enumerate(state["conditions"]): # 0.3 stored it as a list without ids. 
Match by order of conditions if "id" not in condition_state: condition_id = sorted_ids[num] else: condition_id = condition_state["id"] condition = self.conditions.get(condition_id) if condition: condition.from_state(condition_state) def to_state(self): return { "id" : self.id, "last_triggered" : self.last_triggered, "conditions" : [ c.to_state() for c in self.conditions.values() ], } def save(self): self._manager.scheduler.add(self) self._manager.scheduler.save() def remove(self): self._manager.scheduler.remove(self.id) self._manager.scheduler.save() class Condition(object): type_name = "" type_title = "" @classmethod def types(cls): return cls.__subclasses__() @classmethod def get(cls, type_name): for subclass in cls.__subclasses__(): if subclass.type_name == type_name: return subclass return None def __init__(self, manager): self.id = None self._manager = manager def from_config(self, cfg): for key, val in cfg.items(): setattr(self, key, val) def to_config(self): return { "id" : self.id, "type_name" : self.type_name, } def from_state(self, cfg): for key, val in cfg.items(): if key != "id": setattr(self, key, val) def to_state(self): return { "id": self.id, } def display(self): return self.type_title def input_parameters(self, page, varprefix): pass def set_submitted_vars(self, page, varprefix): pass def matches_device_event(self, device_event): raise NotImplementedError() class ConditionOnStartup(Condition): type_name = "on_startup" type_title = "on manager startup" def input_parameters(self, page, varprefix): page.write("<i>This condition has no parameters.</i>") class ConditionOnCCUInitialized(Condition): type_name = "on_ccu_initialized" type_title = "on connection with CCU initialized" def input_parameters(self, page, varprefix): page.write("<i>This condition has no parameters.</i>") class DummyDevice(object): """This device object is needed when a device can not be constructed e.g. because the CCU is currently not available.""" def __init__(self, device_address, channel_address, param_id): self.name = device_address self.address = self.name self.channels = [ DummyChannel(channel_address, param_id), ] class DummyChannel(object): """This channel object is needed when a channel can not be constructed e.g. because the CCU is currently not available.""" def __init__(self, channel_address, param_id): self.name = channel_address self.address = self.name self.values = { param_id: DummyParam(param_id), } class DummyParam(object): """This object is needed when a real object can not be constructed e.g. because the CCU is currently not available.""" def __init__(self, param_id): self.id = param_id self.name = self.id class ConditionOnDeviceEvent(Condition, utils.LogMixin): type_name = "on_device_event" type_title = "on single device event" _event_types = [ ("updated", "Value updated"), ("changed", "Value changed"), ] def __init__(self, manager): super(ConditionOnDeviceEvent, self).__init__(manager) self.device = None self.channel = None self.param = None self.event_type = None self._loaded = False def from_config(self, cfg): self.event_type = cfg["event_type"] if not self._manager.ccu_initialized: self.device = DummyDevice(cfg["device_address"], cfg["channel_address"], cfg["param_id"]) self.channel = self.device.channels[0] self.param = list(self.channel.values.values())[0] self.logger.debug("Can not load \"device event\" condition because the " "connection with the CCU is not established. 
Will retry later.") return self.device = self._manager.ccu.devices.query( device_address=cfg["device_address"]).get(cfg["device_address"]) if not self.device: return try: self.channel = self.device.channel_by_address(cfg["channel_address"]) except KeyError: return self.param = self.channel.values.get(cfg["param_id"]) if not self.param: return self._loaded = True def to_config(self): cfg = super(ConditionOnDeviceEvent, self).to_config() cfg.update({ "device_address" : self.device.address, "channel_address" : self.channel.address, "param_id" : self.param.id, "event_type" : self.event_type, }) return cfg def display(self): txt = super(ConditionOnDeviceEvent, self).display() txt += ": %s, %s, %s, %s" % (self.device.name, self.channel.name, self.param.name, dict(self._event_types)[self.event_type]) if not self._loaded: txt += " (Not connected with CCU. Can not execute this at the moment)" return txt def _device_choices(self): if not self._loaded and self.device: yield self.device.address, self.device.address return for device in self._manager.ccu.devices: yield device.address, "%s (%s)" % (device.name, device.address) def _channel_choices(self): if not self._loaded and self.channel: yield self.channel.address, self.channel.address return if not self.device: return for channel in self.device.channels: yield channel.address, "%s (%s)" % (channel.name, channel.address) def _param_choices(self): if not self._loaded and self.param: yield self.param.id, self.param.id return if not self.channel: return for param_id, param in self.channel.values.items(): yield param_id, "%s (%s)" % (param.name, param_id) def input_parameters(self, page, varprefix): page.write("Device: ") page.select(varprefix+"device_address", sorted(self._device_choices(), key=lambda x: x[1]), self.device and self.device.address, onchange="this.form.submit()") page.write("Channel: ") page.select(varprefix+"channel_address", sorted(self._channel_choices(), key=lambda x: x[1]), self.channel and self.channel.address, onchange="this.form.submit()") page.write("Parameter: ") page.select(varprefix+"param_id", sorted(self._param_choices(), key=lambda x: x[1]), self.param and self.param.id, onchange="this.form.submit()") page.write("Type: ") page.select(varprefix+"event_type", self._event_types, self.event_type) def set_submitted_vars(self, page, varprefix): device_address = page.vars.getvalue(varprefix+"device_address") channel_address = page.vars.getvalue(varprefix+"channel_address") param_id = page.vars.getvalue(varprefix+"param_id") event_type = page.vars.getvalue(varprefix+"event_type") if event_type: if event_type not in dict(self._event_types): raise PMUserError("Invalid event type given.") self.event_type = event_type if not self._manager.ccu_initialized: self.device = DummyDevice(device_address, channel_address, param_id) self.channel = self.device.channels[0] self.param = list(self.channel.values.values())[0] return self._loaded = True if device_address: self.device = self._manager.ccu.devices.query( device_address=device_address).get(device_address) if not self.device: raise PMUserError("Unable to find the given device.") else: raise PMUserError("Please select a device") if channel_address: try: self.channel = self.device.channel_by_address(channel_address) except KeyError: raise PMUserError("Unable to find the given channel.") else: raise PMUserError("Please select a channel") if param_id: self.param = self.channel.values.get(param_id) if not self.param: raise PMUserError("Unable to find the given parameter.") else: raise 
PMUserError("Please select a parameter") def matches_device_event(self, device_event): updated_param, time, time_last_changed, value = device_event if self.param == updated_param: if self.event_type == "updated": return True elif self.event_type == "changed" and time == time_last_changed: return True return False class ConditionOnDevicesOfTypeEvent(Condition, utils.LogMixin): type_name = "on_devices_type_event" type_title = "on devices of type event" _event_types = [ ("updated", "Value updated"), ("changed", "Value changed"), ] def __init__(self, manager): super(ConditionOnDevicesOfTypeEvent, self).__init__(manager) self.device_type = None self.channel_id = None self.param_id = None self.event_type = None self._loaded = False def from_config(self, cfg): self.event_type = cfg["event_type"] ccu_initialized = self._manager.ccu_initialized if not ccu_initialized or cfg["device_type"] in self._devices_by_type().keys(): self.device_type = cfg["device_type"] if self.device_type is None: return self.channel_id = cfg["channel_id"] # TODO: validate whether or not channel_nr is available for this type if self.channel_id is None: return self.param_id = cfg["param_id"] # TODO: validate whether or not channel_nr is available for this channel if self.param_id is None: return self._loaded = True def _devices_by_type(self): types = {} for device in self._manager.ccu.devices.query(): devices_of_type = types.setdefault(device.type, []) devices_of_type.append(device) return types def to_config(self): cfg = super(ConditionOnDevicesOfTypeEvent, self).to_config() cfg.update({ "device_type" : self.device_type, "channel_id" : self.channel_id, "param_id" : self.param_id, "event_type" : self.event_type, }) return cfg def display(self): txt = super(ConditionOnDevicesOfTypeEvent, self).display() # TODO: Use human friendly representations of self.channel_id, self.param_id txt += ": %s, %s, %s, %s" % (self.device_type, self.channel_id, self.param_id, dict(self._event_types)[self.event_type]) if not self._loaded: txt += " (Not connected with CCU. 
Can not execute this at the moment)" return txt def _device_choices(self): if not self._loaded and self.device_type: yield self.device_type, self.device_type return for type_name, devices in sorted(self._devices_by_type().items(), key=lambda x: x[1]): device_names = [ "%s" % d.name for d in devices ] yield type_name, "%s (%s)" % (type_name, ", ".join(device_names)) def _channel_choices(self): if not self._loaded and self.channel_id is not None: yield self.channel_id, self.channel_id if not self.device_type: return for channel_index, channels in sorted(self._channels_of_type().items(), key=lambda x: x[1]): channel_names = [ "%s" % c.name for c in channels ] yield channel_index, "%d (%s)" % (channel_index, ", ".join(channel_names)) def _channels_of_type(self): channels = {} devices_of_type = self._devices_by_type()[self.device_type] for device in devices_of_type: for channel in device.channels: channels_by_index = channels.setdefault(channel.index, []) channels_by_index.append(channel) return channels def _param_choices(self): if not self._loaded and self.param_id is not None: return [(self.param_id, self.param_id)] if self.channel_id is None: return [] return sorted(self._params_of_channel(), key=lambda x: x[1]) def _params_of_channel(self): device = self._devices_by_type()[self.device_type][0] channel = device.channels[self.channel_id] for param_id, param in channel.values.items(): yield param_id, param.name def input_parameters(self, page, varprefix): page.write("Device type: ") page.select(varprefix+"device_type", sorted(self._device_choices(), key=lambda x: x[1]), self.device_type, onchange="this.form.submit()") page.write("Channel: ") page.select(varprefix+"channel_id", sorted(self._channel_choices(), key=lambda x: x[1]), self.channel_id, onchange="this.form.submit()") page.write("Parameter: ") page.select(varprefix+"param_id", sorted(self._param_choices(), key=lambda x: x[1]), self.param_id, onchange="this.form.submit()") page.write("Type: ") page.select(varprefix+"event_type", self._event_types, self.event_type) def set_submitted_vars(self, page, varprefix): device_type = page.vars.getvalue(varprefix+"device_type") channel_id = page.vars.getvalue(varprefix+"channel_id") param_id = page.vars.getvalue(varprefix+"param_id") event_type = page.vars.getvalue(varprefix+"event_type") if event_type: if event_type not in dict(self._event_types): raise PMUserError("Invalid event type given.") self.event_type = event_type if not self._manager.ccu_initialized: return self._loaded = True if device_type: if device_type not in self._devices_by_type(): raise PMUserError("Unable to find the given device type.") else: self.device_type = device_type else: raise PMUserError("Please select a device") if channel_id: channel_id = int(channel_id) if channel_id not in dict(self._channels_of_type()): raise PMUserError("Unable to find the given channel.") else: self.channel_id = channel_id else: raise PMUserError("Please select a channel") if param_id: if param_id not in dict(self._params_of_channel()): raise PMUserError("Unable to find the given parameter.") else: self.param_id = param_id else: raise PMUserError("Please select a parameter") def matches_device_event(self, device_event): updated_param, time, time_last_changed, value = device_event channel = updated_param.channel device = channel.device if self.param_id == updated_param.id \ and self.channel_id == channel.index \ and self.device_type == device.type: if self.event_type == "updated": return True elif self.event_type == "changed" and time == 
time_last_changed: return True return False class ConditionOnTime(Condition): type_name = "on_time" type_title = "based on time" _interval_choices = [ ("timed", "Time interval"), ("daily", "Daily"), ("weekly", "Weekly"), ("monthly", "Monthly"), ] _interval_units = [ ("seconds", "seconds"), ("minutes", "minutes"), ("hours", "hours"), ] def __init__(self, manager): super(ConditionOnTime, self).__init__(manager) self.interval_type = None self.day_of_week = 1 self.day_of_month = 1 self.time_of_day = (13, 00) self.interval_sec = 0 self.interval_unit = None self._next_time = None @property def next_time(self): if self._next_time is None: self.calculate_next_time() return self._next_time def calculate_next_time(self): """From now, calculate the next unix timestamp matching this condition.""" if self.interval_type == "timed": if self._next_time is None: # Execute timed scripts on startup of manager and then in the # configured intervals. Would be good to persist the last execution # one day to prevent too early re-execution after startup. self._next_time = time.time() else: self._next_time = time.time() + self.interval_sec return # Initialize vars to be used as indices for timeparts year, month, mday, hour, minute, second, wday = range(7) now = time.time() # Construct list of time parts for today, using the configured time ref_parts = list(utils.localtime(now, Config.timezone)) ref_parts[hour] = self.time_of_day[0] ref_parts[minute] = self.time_of_day[1] ref_parts[second] = 0 if self.interval_type == "daily": # When todays time is less than the configured time of day, the next # occurance is today at the given time. Otherwise it is tomorrow. ref_ts = time.mktime(tuple(ref_parts)) if now >= ref_ts: ref_ts += 24 * 60 * 60 # tomorrow elif self.interval_type == "weekly": # When current weekday is less than the configured weekday, the next # occurance is in this week, otherwise it is next week. ref_ts = time.mktime(tuple(ref_parts)) days_difference = (self.day_of_week-1) - ref_parts[wday] ref_ts += days_difference * 24 * 60 * 60 if now >= ref_ts: ref_ts += 7 * 24 * 60 * 60 # next week elif self.interval_type == "monthly": # When current day of month is less than the configured day, the next # occurance is in this month, otherwise it is next month. 
ref_parts[mday] = self.day_of_month ref_ts = time.mktime(tuple(ref_parts)) if now >= ref_ts: # next month if ref_parts[month] == 12: ref_parts[month] = 1 ref_parts[year] += 1 else: ref_parts[month] += 1 ref_ts = time.mktime(tuple(ref_parts)) else: raise NotImplementedError() # Fix eventual timezone changes ref_parts = list(utils.localtime(ref_ts, Config.timezone)) ref_parts[hour] = self.time_of_day[0] ref_parts[minute] = self.time_of_day[1] ref_parts[second] = 0 ref_ts = time.mktime(tuple(ref_parts)) self._next_time = ref_ts def display(self): txt = super(ConditionOnTime, self).display() txt += ": %s" % self.interval_type if self.interval_type == "timed": txt += " each %d %s" % (self._formated_interval_sec(), dict(self._interval_units)[self.interval_unit]) else: if self.interval_type == "weekly": txt += " on day %d of week" % self.day_of_week elif self.interval_type == "monthly": txt += " on day %d of month" % self.day_of_month txt += ", at %02d:%02d o'clock" % self.time_of_day txt += " (Next: %s)" % time.strftime("%Y-%m-%d %H:%M:%S", utils.localtime(self.next_time, Config.timezone)) return txt def from_config(self, cfg): super(ConditionOnTime, self).from_config(cfg) self.time_of_day = tuple(self.time_of_day) if self.interval_type == "timed": if self.interval_sec % 3600 == 0: self.interval_unit = "hours" elif self.interval_sec % 60 == 0: self.interval_unit = "minutes" else: self.interval_unit = "seconds" def to_config(self): cfg = super(ConditionOnTime, self).to_config() cfg.update({ "interval_type" : self.interval_type, "time_of_day" : self.time_of_day, }) if self.interval_type == "timed": cfg["interval_sec"] = self.interval_sec elif self.interval_type == "weekly": cfg["day_of_week"] = self.day_of_week elif self.interval_type == "monthly": cfg["day_of_month"] = self.day_of_month return cfg def to_state(self): state = super(ConditionOnTime, self).to_state() state["_next_time"] = self._next_time return state def input_parameters(self, page, varprefix): page.write("<table><tr><td>") page.write("Interval: ") page.write("</td><td>") page.select(varprefix+"interval_type", self._interval_choices, self.interval_type, onchange="this.form.submit()") if self.interval_type == "timed": page.write("Execute each: ") page.input(varprefix+"interval_inp", "%d" % self._formated_interval_sec(), cls="interval_inp") page.select(varprefix+"interval_unit", self._interval_units, self.interval_unit, onchange="this.form.submit()") elif self.interval_type == "weekly": page.write("Day of week: ") page.input(varprefix+"day_of_week", "%d" % self.day_of_week, cls="day_of_week") elif self.interval_type == "monthly": page.write("Day of month: ") page.input(varprefix+"day_of_month", "%d" % self.day_of_month, cls="day_of_month") page.write("</td></tr>") if self.interval_type != "timed": page.write("<tr><td>") page.write("Time (24h format): ") page.write("</td><td>") page.input(varprefix+"time_of_day", "%02d:%02d" % self.time_of_day, cls="time_of_day") page.write("</td></tr>") page.write("</table>") def _formated_interval_sec(self): val = self.interval_sec if self.interval_unit == "hours": val = val / 3600 elif self.interval_unit == "minutes": val = val / 60 return val def set_submitted_vars(self, page, varprefix): interval_type = page.vars.getvalue(varprefix+"interval_type") if page.is_action() and not interval_type: raise PMUserError("You need to configure an interval.") if interval_type: if interval_type not in dict(self._interval_choices): raise PMUserError("Invalid interval given.") self.interval_type = interval_type if 
self.interval_type == "timed": self._set_timed_vars(page, varprefix) return self._set_time_of_day(page, varprefix) if self.interval_type == "weekly": self._set_weekly_vars(page, varprefix) elif self.interval_type == "monthly": self._set_monthly_vars(page, varprefix) self.calculate_next_time() def _set_time_of_day(self, page, varprefix): time_of_day = page.vars.getvalue(varprefix+"time_of_day") if not time_of_day: raise PMUserError("You need to provide a time.") time_parts = time_of_day.split(":") if len(time_parts) != 2: raise PMUserError("The time has to be given in <tt>HH:MM</tt> format.") try: time_parts = tuple(map(int, time_parts)) except ValueError: raise PMUserError("The time has to be given in <tt>HH:MM</tt> format.") hours, minutes = time_parts if hours < 0 or hours > 23: raise PMUserError("The hours need to be between 00 and 23.") if minutes < 0 or minutes > 59: raise PMUserError("The minutes need to be between 00 and 59.") self.time_of_day = time_parts def _set_timed_vars(self, page, varprefix): interval_unit = page.vars.getvalue(varprefix+"interval_unit") raw_interval = page.vars.getvalue(varprefix+"interval_inp") if page.is_action(): if not interval_unit: raise PMUserError("You need to configure the interval unit.") if not raw_interval: raise PMUserError("You need to configure the interval.") if raw_interval and interval_unit: if interval_unit not in dict(self._interval_units): raise PMUserError("Invalid interval unit given.") self.interval_unit = interval_unit try: raw_interval = int(raw_interval) except ValueError: raise PMUserError("Invalid interval given.") if self.interval_unit == "hours": interval = raw_interval * 3600 elif self.interval_unit == "minutes": interval = raw_interval * 60 else: interval = raw_interval if interval < 1: raise PMUserError("The shortest interval is one second.") self.interval_sec = interval def _set_weekly_vars(self, page, varprefix): day_of_week = page.vars.getvalue(varprefix+"day_of_week") if page.is_action() and not day_of_week: raise PMUserError("You need to configure the day of the week.") if day_of_week: try: day_of_week = int(day_of_week) except ValueError: raise PMUserError("Invalid day of week given.") if day_of_week < 1 or day_of_week > 7: raise PMUserError("Invalid day of week given. It needs to be given as number " "between 1 and 7.") self.day_of_week = day_of_week def _set_monthly_vars(self, page, varprefix): day_of_month = page.vars.getvalue(varprefix+"day_of_month") if page.is_action() and not day_of_month: raise PMUserError("You need to configure the day of the month.") if day_of_month: try: day_of_month = int(day_of_month) except ValueError: raise PMUserError("Invalid day of month given.") if day_of_month < 1 or day_of_month > 31: raise PMUserError("Invalid day of month given. 
It needs to be given as number " "between 1 and 31.") self.day_of_month = day_of_month class ConditionOnResidentPresence(Condition): type_name = "on_resident_presence" type_title = "on resident presence" _event_types = [ ("arrival", "Arrival"), ("departure", "Departure"), ("change", "Arrival or departure"), ] def __init__(self, manager): super(ConditionOnResidentPresence, self).__init__(manager) self.resident = None self.event_type = None def display(self): txt = super(ConditionOnResidentPresence, self).display() choices = dict(self._event_types) txt += ": %s of %s" % (choices.get(self.event_type, self.event_type), self.resident.name if self.resident else "UNKNOWN") return txt def from_config(self, cfg): super(ConditionOnResidentPresence, self).from_config(cfg) resident = self._manager.residents.get(cfg["resident_id"]) if resident: self.resident = resident def to_config(self): cfg = super(ConditionOnResidentPresence, self).to_config() cfg.update({ "resident_id" : self.resident.id, "event_type" : self.event_type, }) return cfg def input_parameters(self, page, varprefix): page.write("<table><tr><td>") page.write("Event type: ") page.write("</td><td>") page.select(varprefix+"event_type", self._event_types, self.event_type, onchange="this.form.submit()") page.write("</td></tr>") if not self.event_type: page.write("</table>") return page.write("<tr><td>") page.write("Resident: ") page.write("</td><td>") page.select(varprefix+"resident_id", self._resident_choices(), self.resident and self.resident.id) page.write("</td></tr>") page.write("</table>") def _resident_choices(self): return sorted([ (r.id, r.name) for r in self._manager.residents.residents ], key=lambda r: r[1]) def set_submitted_vars(self, page, varprefix): event_type = page.vars.getvalue(varprefix+"event_type") if page.is_action() and not event_type: raise PMUserError("You need to configure an event type.") if event_type: if event_type not in dict(self._event_types): raise PMUserError("Invalid event type given.") self.event_type = event_type resident_id = page.vars.getvalue(varprefix+"resident_id") if not resident_id: raise PMUserError("You need to choose a resident.") resident_id = int(resident_id) resident = self._manager.residents.get(resident_id) if resident == None: raise PMUserError("Invalid resident given.") self.resident = resident
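The timed-schedule handling above is easiest to see in isolation. Below is a minimal, self-contained sketch of the "daily" branch of ConditionOnTime.calculate_next_time(); the helper name next_daily_occurrence is made up for illustration, and unlike the original it skips the final timezone/DST re-normalization step.

import time

def next_daily_occurrence(hour, minute, now=None):
    """Next unix timestamp at hour:minute, mirroring the "daily" branch
    of ConditionOnTime.calculate_next_time() (without the DST fixup)."""
    if now is None:
        now = time.time()
    parts = list(time.localtime(now))
    parts[3] = hour    # tm_hour
    parts[4] = minute  # tm_min
    parts[5] = 0       # tm_sec
    ref_ts = time.mktime(tuple(parts))
    if now >= ref_ts:
        ref_ts += 24 * 60 * 60  # today's slot already passed -> tomorrow
    return ref_ts

print(time.strftime("%Y-%m-%d %H:%M", time.localtime(next_daily_occurrence(13, 0))))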
#!/usr/bin/env python # encoding: utf-8 # # pmatic - Python API for Homematic. Easy to use. # Copyright (C) 2016 Lars Michelsen <lm@larsmichelsen.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # Add Python 3.x behaviour to 2.7 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import pytest from pmatic import PMException import pmatic.utils as utils import pmatic.api def test_implicit_remote_init_missing_args(): with pytest.raises(PMException): pmatic.api.init() def test_explicit_remote_init_missing_args(): with pytest.raises(PMException): pmatic.api.init("remote") def test_explicit_local_init_but_remote(): with pytest.raises(PMException): pmatic.api.init("local") def test_explicit_local_enforce(): orig_is_ccu = utils.is_ccu utils.is_ccu = lambda: True API = pmatic.api.init("local") assert isinstance(API, pmatic.api.LocalAPI) utils.is_ccu = orig_is_ccu def test_explicit_wrong_init(): with pytest.raises(PMException): pmatic.api.init("WTF?!") class SpecificAPI(pmatic.api.AbstractAPI): def __init__(self): super(SpecificAPI, self).__init__() self._constructed = True def _get_methods_config(self): return [] def close(self): pass class TestAbstractAPI(object): @pytest.fixture(scope="function") def API(self): return SpecificAPI() def test_replace_wrong_encoded_json(self): assert utils.is_text(pmatic.api.AbstractAPI._replace_wrong_encoded_json("xxxx")) assert pmatic.api.AbstractAPI._replace_wrong_encoded_json("xxxx") == "xxxx" assert pmatic.api.AbstractAPI._replace_wrong_encoded_json("\\{\n \\[\n \\/\n") \ == "{\n [\n /\n" assert pmatic.api.AbstractAPI._replace_wrong_encoded_json("{\\{") == "{{" def test_invalid_response_handling(self, API, monkeypatch): with pytest.raises(PMException) as e: API._parse_api_response("ding", {}, "{]") assert "Failed to parse response" def call_rega_present(method_name_int, **kwargs): # pylint:disable=unused-argument if method_name_int == "rega_is_present": return True monkeypatch.setattr(API, "_call", call_rega_present) with pytest.raises(PMException) as e: API._parse_api_response("dingdong", {}, "{\"error\": {\"code\": 501, \"name\": \"xxx\", \"message\": \"asd\"}}") assert "[dingdong] xxx: asd" in str(e) def call_rega_not_present(method_name_int, **kwargs): # pylint:disable=unused-argument if method_name_int == "rega_is_present": return False monkeypatch.setattr(API, "_call", call_rega_not_present) with pytest.raises(PMException) as e: API._parse_api_response("dingdong", {}, "{\"error\": {\"code\": 501, \"name\": \"xxx\", \"message\": \"asd\"}}") assert "the CCU has just been started" in str(e) def test_invalid_api_call(self, API, monkeypatch): def call(method_name_int, **kwargs): # pylint:disable=unused-argument API._get_method(method_name_int) monkeypatch.setattr(API, "_call", call) with pytest.raises(PMException) 
as e: API.dingdong_piff() assert "is not a valid method" in str(e) def test_del(self, API, monkeypatch): monkeypatch.setattr(API, "close", lambda: pmatic.api.AbstractAPI.close(API)) with pytest.raises(NotImplementedError): API.__del__() def test_to_internal_name(self, API): assert API._to_internal_name("Interface.activateLinkParamset") \ == "interface_activate_link_paramset" assert API._to_internal_name("DingReGaDong") \ == "ding_rega_dong" assert API._to_internal_name("dingBidCoSDong") \ == "ding_bidcos_dong" assert API._to_internal_name("ding.BidCoSDong") \ == "ding_bidcos_dong" assert API._to_internal_name("Interface.setBidCoSInterface") \ == "interface_set_bidcos_interface" def test_abstract_methods(self, API): with pytest.raises(NotImplementedError): pmatic.api.AbstractAPI._get_methods_config(API) with pytest.raises(NotImplementedError): API._call("bla") with pytest.raises(NotImplementedError): pmatic.api.AbstractAPI.close(API)
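The _to_internal_name assertions in the test above reduce to a camelCase-with-acronyms to snake_case conversion. A rough standalone re-implementation, for illustration only (this is not pmatic's actual code, and the acronym table is trimmed to the cases exercised by the test):

import re

def to_internal_name(method_name):
    """Illustrative camelCase -> snake_case conversion covering the
    cases asserted in the test above."""
    # Normalize the acronyms first so they survive as single words.
    method_name = method_name.replace("ReGa", "Rega").replace("BidCoS", "Bidcos")
    # Treat the API namespace dot as a word separator.
    method_name = method_name.replace(".", "_")
    # Split at lower/upper case boundaries and lower-case the result.
    return re.sub(r"(?<=[a-z0-9])(?=[A-Z])", "_", method_name).lower()

assert to_internal_name("Interface.activateLinkParamset") == "interface_activate_link_paramset"
assert to_internal_name("ding.BidCoSDong") == "ding_bidcos_dong"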
LarsMichelsen/pmatic
tests/test_api.py
pmatic/manager.py
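EventHistory in pmatic/manager.py keeps a bounded list by popping the oldest entry on overflow; the standard-library equivalent is a deque with maxlen, sketched below with a plain constant standing in for Config.event_history_length:

import time
from collections import deque

HISTORY_LENGTH = 1000  # stands in for Config.event_history_length

class BoundedEventHistory(object):
    def __init__(self):
        # maxlen makes the deque drop the oldest entry automatically,
        # replacing the manual list.pop(0) of EventHistory.add_event().
        self._events = deque(maxlen=HISTORY_LENGTH)
        self._num_events_total = 0
        self._last_event_time = None

    def add_event(self, event_dict):
        self._last_event_time = time.time()
        self._num_events_total += 1
        self._events.append(event_dict)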
"""All methods needed to bootstrap a Home Assistant instance.""" import asyncio import logging.handlers from timeit import default_timer as timer from types import ModuleType from typing import Optional, Dict, List from homeassistant import requirements, core, loader, config as conf_util from homeassistant.config import async_notify_setup_error from homeassistant.const import EVENT_COMPONENT_LOADED, PLATFORM_FORMAT from homeassistant.exceptions import HomeAssistantError from homeassistant.util.async_ import run_coroutine_threadsafe _LOGGER = logging.getLogger(__name__) ATTR_COMPONENT = 'component' DATA_SETUP = 'setup_tasks' DATA_DEPS_REQS = 'deps_reqs_processed' SLOW_SETUP_WARNING = 10 def setup_component(hass: core.HomeAssistant, domain: str, config: Optional[Dict] = None) -> bool: """Set up a component and all its dependencies.""" return run_coroutine_threadsafe( # type: ignore async_setup_component(hass, domain, config), loop=hass.loop).result() async def async_setup_component(hass: core.HomeAssistant, domain: str, config: Optional[Dict] = None) -> bool: """Set up a component and all its dependencies. This method is a coroutine. """ if domain in hass.config.components: return True setup_tasks = hass.data.get(DATA_SETUP) if setup_tasks is not None and domain in setup_tasks: return await setup_tasks[domain] # type: ignore if config is None: config = {} if setup_tasks is None: setup_tasks = hass.data[DATA_SETUP] = {} task = setup_tasks[domain] = hass.async_create_task( _async_setup_component(hass, domain, config)) return await task # type: ignore async def _async_process_dependencies( hass: core.HomeAssistant, config: Dict, name: str, dependencies: List[str]) -> bool: """Ensure all dependencies are set up.""" blacklisted = [dep for dep in dependencies if dep in loader.DEPENDENCY_BLACKLIST] if blacklisted: _LOGGER.error("Unable to set up dependencies of %s: " "found blacklisted dependencies: %s", name, ', '.join(blacklisted)) return False tasks = [async_setup_component(hass, dep, config) for dep in dependencies] if not tasks: return True results = await asyncio.gather(*tasks, loop=hass.loop) failed = [dependencies[idx] for idx, res in enumerate(results) if not res] if failed: _LOGGER.error("Unable to set up dependencies of %s. " "Setup failed for dependencies: %s", name, ', '.join(failed)) return False return True async def _async_setup_component(hass: core.HomeAssistant, domain: str, config: Dict) -> bool: """Set up a component for Home Assistant. This method is a coroutine. 
""" def log_error(msg: str, link: bool = True) -> None: """Log helper.""" _LOGGER.error("Setup failed for %s: %s", domain, msg) async_notify_setup_error(hass, domain, link) component = loader.get_component(hass, domain) if not component: log_error("Component not found.", False) return False # Validate no circular dependencies components = loader.load_order_component(hass, domain) # OrderedSet is empty if component or dependencies could not be resolved if not components: log_error("Unable to resolve component or dependencies.") return False processed_config = \ conf_util.async_process_component_config(hass, config, domain) if processed_config is None: log_error("Invalid config.") return False try: await async_process_deps_reqs(hass, config, domain, component) except HomeAssistantError as err: log_error(str(err)) return False start = timer() _LOGGER.info("Setting up %s", domain) if hasattr(component, 'PLATFORM_SCHEMA'): # Entity components have their own warning warn_task = None else: warn_task = hass.loop.call_later( SLOW_SETUP_WARNING, _LOGGER.warning, "Setup of %s is taking over %s seconds.", domain, SLOW_SETUP_WARNING) try: if hasattr(component, 'async_setup'): result = await component.async_setup( # type: ignore hass, processed_config) else: result = await hass.async_add_executor_job( component.setup, hass, processed_config) # type: ignore except Exception: # pylint: disable=broad-except _LOGGER.exception("Error during setup of component %s", domain) async_notify_setup_error(hass, domain, True) return False finally: end = timer() if warn_task: warn_task.cancel() _LOGGER.info("Setup of domain %s took %.1f seconds.", domain, end - start) if result is False: log_error("Component failed to initialize.") return False if result is not True: log_error("Component did not return boolean if setup was successful. " "Disabling component.") loader.set_component(hass, domain, None) return False if hass.config_entries: for entry in hass.config_entries.async_entries(domain): await entry.async_setup(hass, component=component) hass.config.components.add(component.DOMAIN) # type: ignore # Cleanup if domain in hass.data[DATA_SETUP]: hass.data[DATA_SETUP].pop(domain) hass.bus.async_fire( EVENT_COMPONENT_LOADED, {ATTR_COMPONENT: component.DOMAIN} # type: ignore ) return True async def async_prepare_setup_platform(hass: core.HomeAssistant, config: Dict, domain: str, platform_name: str) \ -> Optional[ModuleType]: """Load a platform and makes sure dependencies are setup. This method is a coroutine. """ platform_path = PLATFORM_FORMAT.format(domain, platform_name) def log_error(msg: str) -> None: """Log helper.""" _LOGGER.error("Unable to prepare setup for platform %s: %s", platform_path, msg) async_notify_setup_error(hass, platform_path) platform = loader.get_platform(hass, domain, platform_name) # Not found if platform is None: log_error("Platform not found.") return None # Already loaded if platform_path in hass.config.components: return platform try: await async_process_deps_reqs( hass, config, platform_path, platform) except HomeAssistantError as err: log_error(str(err)) return None return platform async def async_process_deps_reqs( hass: core.HomeAssistant, config: Dict, name: str, module: ModuleType) -> None: """Process all dependencies and requirements for a module. Module is a Python module of either a component or platform. 
""" processed = hass.data.get(DATA_DEPS_REQS) if processed is None: processed = hass.data[DATA_DEPS_REQS] = set() elif name in processed: return if hasattr(module, 'DEPENDENCIES'): dep_success = await _async_process_dependencies( hass, config, name, module.DEPENDENCIES) # type: ignore if not dep_success: raise HomeAssistantError("Could not set up all dependencies.") if not hass.config.skip_pip and hasattr(module, 'REQUIREMENTS'): req_success = await requirements.async_process_requirements( hass, name, module.REQUIREMENTS) # type: ignore if not req_success: raise HomeAssistantError("Could not install all requirements.") processed.add(name)
"""The tests for Home Assistant frontend.""" import asyncio import re from unittest.mock import patch import pytest from homeassistant.setup import async_setup_component from homeassistant.components.frontend import ( DOMAIN, CONF_JS_VERSION, CONF_THEMES, CONF_EXTRA_HTML_URL, CONF_EXTRA_HTML_URL_ES5) from homeassistant.components.websocket_api.const import TYPE_RESULT from tests.common import mock_coro CONFIG_THEMES = { DOMAIN: { CONF_THEMES: { 'happy': { 'primary-color': 'red' } } } } @pytest.fixture def mock_http_client(hass, aiohttp_client): """Start the Hass HTTP component.""" hass.loop.run_until_complete(async_setup_component(hass, 'frontend', {})) return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @pytest.fixture def mock_http_client_with_themes(hass, aiohttp_client): """Start the Hass HTTP component.""" hass.loop.run_until_complete(async_setup_component(hass, 'frontend', { DOMAIN: { CONF_THEMES: { 'happy': { 'primary-color': 'red' } } }})) return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @pytest.fixture def mock_http_client_with_urls(hass, aiohttp_client): """Start the Hass HTTP component.""" hass.loop.run_until_complete(async_setup_component(hass, 'frontend', { DOMAIN: { CONF_JS_VERSION: 'auto', CONF_EXTRA_HTML_URL: ["https://domain.com/my_extra_url.html"], CONF_EXTRA_HTML_URL_ES5: ["https://domain.com/my_extra_url_es5.html"] }})) return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @asyncio.coroutine def test_frontend_and_static(mock_http_client): """Test if we can get the frontend.""" resp = yield from mock_http_client.get('') assert resp.status == 200 assert 'cache-control' not in resp.headers text = yield from resp.text() # Test we can retrieve frontend.js frontendjs = re.search( r'(?P<app>\/frontend_es5\/app-[A-Za-z0-9]{8}.js)', text) assert frontendjs is not None resp = yield from mock_http_client.get(frontendjs.groups(0)[0]) assert resp.status == 200 assert 'public' in resp.headers.get('cache-control') @asyncio.coroutine def test_dont_cache_service_worker(mock_http_client): """Test that we don't cache the service worker.""" resp = yield from mock_http_client.get('/service_worker_es5.js') assert resp.status == 200 assert 'cache-control' not in resp.headers resp = yield from mock_http_client.get('/service_worker.js') assert resp.status == 200 assert 'cache-control' not in resp.headers @asyncio.coroutine def test_404(mock_http_client): """Test for HTTP 404 error.""" resp = yield from mock_http_client.get('/not-existing') assert resp.status == 404 @asyncio.coroutine def test_we_cannot_POST_to_root(mock_http_client): """Test that POST is not allow to root.""" resp = yield from mock_http_client.post('/') assert resp.status == 405 @asyncio.coroutine def test_states_routes(mock_http_client): """All served by index.""" resp = yield from mock_http_client.get('/states') assert resp.status == 200 resp = yield from mock_http_client.get('/states/group.existing') assert resp.status == 200 async def test_themes_api(hass, hass_ws_client): """Test that /api/themes returns correct data.""" assert await async_setup_component(hass, 'frontend', CONFIG_THEMES) client = await hass_ws_client(hass) await client.send_json({ 'id': 5, 'type': 'frontend/get_themes', }) msg = await client.receive_json() assert msg['result']['default_theme'] == 'default' assert msg['result']['themes'] == {'happy': {'primary-color': 'red'}} async def test_themes_set_theme(hass, hass_ws_client): """Test frontend.set_theme service.""" assert await async_setup_component(hass, 
'frontend', CONFIG_THEMES) client = await hass_ws_client(hass) await hass.services.async_call( DOMAIN, 'set_theme', {'name': 'happy'}, blocking=True) await client.send_json({ 'id': 5, 'type': 'frontend/get_themes', }) msg = await client.receive_json() assert msg['result']['default_theme'] == 'happy' await hass.services.async_call( DOMAIN, 'set_theme', {'name': 'default'}, blocking=True) await client.send_json({ 'id': 6, 'type': 'frontend/get_themes', }) msg = await client.receive_json() assert msg['result']['default_theme'] == 'default' async def test_themes_set_theme_wrong_name(hass, hass_ws_client): """Test frontend.set_theme service called with wrong name.""" assert await async_setup_component(hass, 'frontend', CONFIG_THEMES) client = await hass_ws_client(hass) await hass.services.async_call( DOMAIN, 'set_theme', {'name': 'wrong'}, blocking=True) await client.send_json({ 'id': 5, 'type': 'frontend/get_themes', }) msg = await client.receive_json() assert msg['result']['default_theme'] == 'default' async def test_themes_reload_themes(hass, hass_ws_client): """Test frontend.reload_themes service.""" assert await async_setup_component(hass, 'frontend', CONFIG_THEMES) client = await hass_ws_client(hass) with patch('homeassistant.components.frontend.load_yaml_config_file', return_value={DOMAIN: { CONF_THEMES: { 'sad': {'primary-color': 'blue'} }}}): await hass.services.async_call( DOMAIN, 'set_theme', {'name': 'happy'}, blocking=True) await hass.services.async_call(DOMAIN, 'reload_themes', blocking=True) await client.send_json({ 'id': 5, 'type': 'frontend/get_themes', }) msg = await client.receive_json() assert msg['result']['themes'] == {'sad': {'primary-color': 'blue'}} assert msg['result']['default_theme'] == 'default' async def test_missing_themes(hass, hass_ws_client): """Test that themes API works when themes are not defined.""" await async_setup_component(hass, 'frontend') client = await hass_ws_client(hass) await client.send_json({ 'id': 5, 'type': 'frontend/get_themes', }) msg = await client.receive_json() assert msg['id'] == 5 assert msg['type'] == TYPE_RESULT assert msg['success'] assert msg['result']['default_theme'] == 'default' assert msg['result']['themes'] == {} @asyncio.coroutine def test_extra_urls(mock_http_client_with_urls): """Test that extra urls are loaded.""" resp = yield from mock_http_client_with_urls.get('/states?latest') assert resp.status == 200 text = yield from resp.text() assert text.find("href='https://domain.com/my_extra_url.html'") >= 0 @asyncio.coroutine def test_extra_urls_es5(mock_http_client_with_urls): """Test that es5 extra urls are loaded.""" resp = yield from mock_http_client_with_urls.get('/states?es5') assert resp.status == 200 text = yield from resp.text() assert text.find("href='https://domain.com/my_extra_url_es5.html'") >= 0 async def test_get_panels(hass, hass_ws_client): """Test get_panels command.""" await async_setup_component(hass, 'frontend') await hass.components.frontend.async_register_built_in_panel( 'map', 'Map', 'mdi:account-location') client = await hass_ws_client(hass) await client.send_json({ 'id': 5, 'type': 'get_panels', }) msg = await client.receive_json() assert msg['id'] == 5 assert msg['type'] == TYPE_RESULT assert msg['success'] assert msg['result']['map']['component_name'] == 'map' assert msg['result']['map']['url_path'] == 'map' assert msg['result']['map']['icon'] == 'mdi:account-location' assert msg['result']['map']['title'] == 'Map' async def test_get_translations(hass, hass_ws_client): """Test get_translations 
command.""" await async_setup_component(hass, 'frontend') client = await hass_ws_client(hass) with patch('homeassistant.components.frontend.async_get_translations', side_effect=lambda hass, lang: mock_coro({'lang': lang})): await client.send_json({ 'id': 5, 'type': 'frontend/get_translations', 'language': 'nl', }) msg = await client.receive_json() assert msg['id'] == 5 assert msg['type'] == TYPE_RESULT assert msg['success'] assert msg['result'] == {'resources': {'lang': 'nl'}} async def test_auth_load(mock_http_client): """Test auth component loaded by default.""" resp = await mock_http_client.get('/auth/providers') assert resp.status == 200 async def test_onboarding_load(mock_http_client): """Test onboarding component loaded by default.""" resp = await mock_http_client.get('/api/onboarding') assert resp.status == 200 async def test_auth_authorize(mock_http_client): """Test the authorize endpoint works.""" resp = await mock_http_client.get( '/auth/authorize?response_type=code&client_id=https://localhost/&' 'redirect_uri=https://localhost/&state=123%23456') assert str(resp.url.relative()) == ( '/frontend_es5/authorize.html?response_type=code&client_id=' 'https://localhost/&redirect_uri=https://localhost/&state=123%23456') resp = await mock_http_client.get( '/auth/authorize?latest&response_type=code&client_id=' 'https://localhost/&redirect_uri=https://localhost/&state=123%23456') assert str(resp.url.relative()) == ( '/frontend_latest/authorize.html?latest&response_type=code&client_id=' 'https://localhost/&redirect_uri=https://localhost/&state=123%23456')
Danielhiversen/home-assistant
tests/components/frontend/test_init.py
homeassistant/setup.py
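The DATA_SETUP bookkeeping in homeassistant/setup.py exists so that concurrent callers of async_setup_component await a single shared task per domain. The same pattern in isolation, as a generic asyncio sketch rather than Home Assistant code:

import asyncio

_setup_tasks = {}

async def setup_once(domain, factory):
    """Start factory() at most once per domain; concurrent and later
    callers all await the same task, as hass.data[DATA_SETUP] does."""
    task = _setup_tasks.get(domain)
    if task is None:
        task = _setup_tasks[domain] = asyncio.ensure_future(factory())
    return await task

async def main():
    async def slow_setup():
        await asyncio.sleep(0.1)
        return True
    results = await asyncio.gather(
        *(setup_once('light', slow_setup) for _ in range(3)))
    assert results == [True, True, True]  # one setup, three awaiters

asyncio.run(main())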
# Authors: # Adam Young <ayoung@redhat.com> # Rob Crittenden <rcritten@redhat.com> # # Copyright (c) 2010 Red Hat # See file 'copying' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Plugin to make multiple ipa calls via one remote procedure call To run this code in the lite-server curl -H "Content-Type:application/json" -H "Accept:application/json" -H "Accept-Language:en" --negotiate -u : --cacert /etc/ipa/ca.crt -d @batch_request.json -X POST http://localhost:8888/ipa/json where the contents of the file batch_request.json follow the below example {"method":"batch","params":[[ {"method":"group_find","params":[[],{}]}, {"method":"user_find","params":[[],{"whoami":"true","all":"true"}]}, {"method":"user_show","params":[["admin"],{"all":true}]} ],{}],"id":1} The format of the response is nested the same way. At the top you will see "error": null, "id": 1, "result": { "count": 3, "results": [ And then a nested response for each IPA command method sent in the request """ from ipalib import api, errors from ipalib import Command from ipalib.parameters import Str, Any from ipalib.output import Output from ipalib import output from ipalib.text import _ from ipalib.request import context from ipalib.plugable import Registry from ipapython.version import API_VERSION register = Registry() @register() class batch(Command): NO_CLI = True takes_args = ( Any('methods*', doc=_('Nested Methods to execute'), ), ) take_options = ( Str('version', cli_name='version', doc=_('Client version. 
Used to determine if server will accept request.'), exclude='webui', flags=['no_option', 'no_output'], default=API_VERSION, autofill=True, ), ) has_output = ( Output('count', int, doc=''), Output('results', (list, tuple), doc='') ) def execute(self, *args, **options): results = [] for arg in args[0]: params = dict() name = None try: if 'method' not in arg: raise errors.RequirementError(name='method') if 'params' not in arg: raise errors.RequirementError(name='params') name = arg['method'] if name not in self.Command: raise errors.CommandError(name=name) a, kw = arg['params'] newkw = dict((str(k), v) for k, v in kw.items()) params = api.Command[name].args_options_2_params(*a, **newkw) newkw.setdefault('version', options['version']) result = api.Command[name](*a, **newkw) self.info( '%s: batch: %s(%s): SUCCESS', context.principal, name, ', '.join(api.Command[name]._repr_iter(**params)) ) result['error']=None except Exception as e: if isinstance(e, errors.RequirementError) or \ isinstance(e, errors.CommandError): self.info( '%s: batch: %s', context.principal, e.__class__.__name__ ) else: self.info( '%s: batch: %s(%s): %s', context.principal, name, ', '.join(api.Command[name]._repr_iter(**params)), e.__class__.__name__ ) if isinstance(e, errors.PublicError): reported_error = e else: reported_error = errors.InternalError() result = dict( error=reported_error.strerror, error_code=reported_error.errno, error_name=unicode(type(reported_error).__name__), ) results.append(result) return dict(count=len(results) , results=results)
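On the client side, the nested request body that the batch command expects can be assembled as below; this only illustrates the wire format shown in the plugin's docstring and is not part of the ipalib client API:

import json

def build_batch_request(calls, request_id=1):
    """Assemble the nested JSON-RPC body accepted by the batch plugin:
    each inner call carries its own method name plus [args, options]."""
    methods = [{"method": name, "params": [list(args), options]}
               for name, args, options in calls]
    return json.dumps({"method": "batch",
                       "params": [methods, {}],
                       "id": request_id})

print(build_batch_request([
    ("group_find", [], {}),
    ("user_show", ["admin"], {"all": True}),
]))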
# Authors: # Petr Viktorin <pviktori@redhat.com> # # Copyright (C) 2013 Red Hat # see file 'COPYING' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os import tempfile import shutil import base64 import glob import contextlib import nose import pytest from ipalib import x509 from ipapython import ipautil from ipaplatform.paths import paths from ipapython.dn import DN from ipatests.test_integration.base import IntegrationTest from ipatests.test_integration import tasks _DEFAULT = object() def get_install_stdin(cert_passwords=()): lines = [ 'yes', # Existing BIND configuration detected, overwrite? [no] '', # Server host name (has default) '', # Confirm domain name (has default) ] lines.extend(cert_passwords) # Enter foo.p12 unlock password lines += [ '', # Do you want to configure the reverse zone? [yes] '', # Please specify the reverse zone name [47.34.10.in-addr.arpa.] 'yes', # Continue with these values? ] return '\n'.join(lines + ['']) def get_replica_prepare_stdin(cert_passwords=()): lines = list(cert_passwords) # Enter foo.p12 unlock password return '\n'.join(lines + ['']) def assert_error(result, stderr_text, returncode=None): "Assert that `result` command failed and its stderr contains `stderr_text`" assert stderr_text in result.stderr_text, result.stderr_text if returncode: assert result.returncode == returncode else: assert result.returncode > 0 class CALessBase(IntegrationTest): @classmethod def install(cls, mh): super(CALessBase, cls).install(mh) cls.cert_dir = tempfile.mkdtemp(prefix="ipatest-") cls.pem_filename = os.path.join(cls.cert_dir, 'root.pem') scriptfile = os.path.join(os.path.dirname(__file__), 'scripts', 'caless-create-pki') cls.cert_password = cls.master.config.admin_password cls.crl_path = os.path.join(cls.master.config.test_dir, 'crl') if cls.replicas: replica_hostname = cls.replicas[0].hostname else: replica_hostname = 'unused-replica.test' if cls.clients: client_hostname = cls.clients[0].hostname else: client_hostname = 'unused-client.test' env = { 'domain': cls.master.domain.name, 'server1': cls.master.hostname, 'server2': replica_hostname, 'client': client_hostname, 'dbdir': 'nssdb', 'dbpassword': cls.cert_password, 'crl_path': cls.crl_path, } ipautil.run(['bash', '-ex', scriptfile], cwd=cls.cert_dir, env=env) for host in cls.get_all_hosts(): tasks.apply_common_fixes(host) # Copy CRLs over base = os.path.join(cls.cert_dir, 'nssdb') host.transport.mkdir_recursive(cls.crl_path) for source in glob.glob(os.path.join(base, '*.crl')): dest = os.path.join(cls.crl_path, os.path.basename(source)) host.transport.put_file(source, dest) @classmethod def uninstall(cls, mh): # Remove the NSS database shutil.rmtree(cls.cert_dir) # Remove CA cert in /etc/pki/nssdb, in case of failed (un)install for host in cls.get_all_hosts(): cls.master.run_command(['certutil', '-d', paths.NSS_DB_DIR, '-D', '-n', 'External CA cert'], raiseonerr=False) super(CALessBase, 
              cls).uninstall()

    @classmethod
    def install_server(cls, host=None,
                       http_pkcs12='server.p12', dirsrv_pkcs12='server.p12',
                       http_pkcs12_exists=True, dirsrv_pkcs12_exists=True,
                       http_pin=_DEFAULT, dirsrv_pin=_DEFAULT,
                       root_ca_file='root.pem', unattended=True,
                       stdin_text=None):
        """Install a CA-less server

        Return value is the remote ipa-server-install command
        """
        if host is None:
            host = cls.master
        if http_pin is _DEFAULT:
            http_pin = cls.cert_password
        if dirsrv_pin is _DEFAULT:
            dirsrv_pin = cls.cert_password

        files_to_copy = ['root.pem']
        if http_pkcs12_exists:
            files_to_copy.append(http_pkcs12)
        if dirsrv_pkcs12_exists:
            files_to_copy.append(dirsrv_pkcs12)
        for filename in set(files_to_copy):
            cls.copy_cert(host, filename)

        host.collect_log(paths.IPASERVER_INSTALL_LOG)
        host.collect_log(paths.IPACLIENT_INSTALL_LOG)
        inst = host.domain.realm.replace('.', '-')
        host.collect_log(paths.SLAPD_INSTANCE_ERROR_LOG_TEMPLATE % inst)
        host.collect_log(paths.SLAPD_INSTANCE_ACCESS_LOG_TEMPLATE % inst)

        args = [
            'ipa-server-install',
            '--http-cert-file', http_pkcs12,
            '--dirsrv-cert-file', dirsrv_pkcs12,
            '--ca-cert-file', root_ca_file,
            '--ip-address', host.ip,
            '-r', host.domain.name,
            '-p', host.config.dirman_password,
            '-a', host.config.admin_password,
            '--setup-dns',
            '--forwarder', host.config.dns_forwarder,
        ]

        if http_pin is not None:
            args.extend(['--http-pin', http_pin])
        if dirsrv_pin is not None:
            args.extend(['--dirsrv-pin', dirsrv_pin])
        if unattended:
            args.extend(['-U'])

        return host.run_command(args, raiseonerr=False,
                                stdin_text=stdin_text)

    @classmethod
    def copy_cert(cls, host, filename):
        host.transport.put_file(os.path.join(cls.cert_dir, filename),
                                os.path.join(host.config.test_dir, filename))

    @classmethod
    def uninstall_server(cls, host=None):
        if host is None:
            host = cls.master
        host.run_command(['ipa-server-install', '--uninstall', '-U'])

    def prepare_replica(self, _replica_number=0, replica=None, master=None,
                        http_pkcs12='replica.p12',
                        dirsrv_pkcs12='replica.p12',
                        http_pkcs12_exists=True, dirsrv_pkcs12_exists=True,
                        http_pin=_DEFAULT, dirsrv_pin=_DEFAULT,
                        root_ca_file='root.pem', unattended=True,
                        stdin_text=None):
        """Prepare a CA-less replica

        Puts the bundle file into test_dir on the replica if successful,
        otherwise ensures it is missing.
        Return value is the remote ipa-replica-prepare command
        """
        if replica is None:
            replica = self.replicas[_replica_number]
        if master is None:
            master = self.master
        if http_pin is _DEFAULT:
            http_pin = self.cert_password
        if dirsrv_pin is _DEFAULT:
            dirsrv_pin = self.cert_password

        files_to_copy = ['root.pem']
        if http_pkcs12_exists:
            files_to_copy.append(http_pkcs12)
        if dirsrv_pkcs12_exists:
            files_to_copy.append(dirsrv_pkcs12)
        for filename in set(files_to_copy):
            master.transport.put_file(
                os.path.join(self.cert_dir, filename),
                os.path.join(master.config.test_dir, filename))

        replica.collect_log(paths.IPAREPLICA_INSTALL_LOG)
        replica.collect_log(paths.IPACLIENT_INSTALL_LOG)
        inst = replica.domain.realm.replace('.', '-')
        replica.collect_log(paths.SLAPD_INSTANCE_ERROR_LOG_TEMPLATE % inst)
        replica.collect_log(paths.SLAPD_INSTANCE_ACCESS_LOG_TEMPLATE % inst)

        args = [
            'ipa-replica-prepare',
            '--ip-address', replica.ip,
            '-p', replica.config.dirman_password,
        ]

        if http_pkcs12:
            args.extend(['--http-cert-file', http_pkcs12])
        if dirsrv_pkcs12:
            args.extend(['--dirsrv-cert-file', dirsrv_pkcs12])
        if http_pin is not None:
            args.extend(['--http-pin', http_pin])
        if dirsrv_pin is not None:
            args.extend(['--dirsrv-pin', dirsrv_pin])

        args.extend([replica.hostname])

        result = master.run_command(args, raiseonerr=False,
                                    stdin_text=stdin_text)

        if result.returncode == 0:
            replica_bundle = master.get_file_contents(
                paths.REPLICA_INFO_GPG_TEMPLATE % replica.hostname)
            replica.put_file_contents(self.get_replica_filename(replica),
                                      replica_bundle)
        else:
            replica.run_command(['rm', self.get_replica_filename(replica)],
                                raiseonerr=False)

        return result

    def get_replica_filename(self, replica):
        return os.path.join(replica.config.test_dir, 'replica-info.gpg')

    def install_replica(self, _replica_number=0, replica=None,
                        unattended=True):
        """Install a CA-less replica

        The bundle file is expected to be in the test_dir

        Return value is the remote ipa-replica-install command
        """
        if replica is None:
            replica = self.replicas[_replica_number]

        # '-U' is only added below when unattended is requested, instead of
        # being passed unconditionally as well
        args = ['ipa-replica-install',
                '-p', replica.config.dirman_password,
                '-w', replica.config.admin_password,
                '--ip-address', replica.ip,
                self.get_replica_filename(replica)]
        if unattended:
            args.append('-U')
        return replica.run_command(args)

    @classmethod
    def export_pkcs12(cls, nickname, filename='server.p12', password=None):
        """Export a cert as PKCS#12 to the given file"""
        if password is None:
            password = cls.cert_password
        ipautil.run(['pk12util',
                     '-o', filename,
                     '-n', nickname,
                     '-d', 'nssdb',
                     '-K', cls.cert_password,
                     '-W', password], cwd=cls.cert_dir)

    @classmethod
    def get_pem(cls, nickname):
        pem_cert, _stderr, _returncode = ipautil.run(
            ['certutil', '-L', '-d', 'nssdb', '-n', nickname, '-a'],
            cwd=cls.cert_dir)
        return pem_cert

    def verify_installation(self):
        """Verify CA cert PEM file and LDAP entry created by install

        Called from every positive server install test
        """
        with open(self.pem_filename) as f:
            expected_cacrt = f.read()
        self.log.debug('Expected /etc/ipa/ca.crt contents:\n%s',
                       expected_cacrt)
        expected_binary_cacrt = base64.b64decode(x509.strip_header(
            expected_cacrt))
        self.log.debug('Expected binary CA cert:\n%r',
                       expected_binary_cacrt)
        for host in [self.master] + self.replicas:
            # Check the LDAP entry
            ldap = host.ldap_connect()
            entry = ldap.get_entry(DN(('cn', 'CACert'), ('cn', 'ipa'),
                                      ('cn', 'etc'), host.domain.basedn))
            cert_from_ldap = entry.single_value['cACertificate']
            self.log.debug('CA cert from LDAP on %s:\n%r',
                           host, cert_from_ldap)
            assert cert_from_ldap == expected_binary_cacrt

            # Verify certmonger was not started
            result = host.run_command(['getcert', 'list'],
                                      raiseonerr=False)
            assert result.returncode > 0
            assert ('Please verify that the certmonger service has been '
                    'started.' in result.stdout_text), result.stdout_text

        for host in self.get_all_hosts():
            # Check the cert PEM file
            remote_cacrt = host.get_file_contents(paths.IPA_CA_CRT)
            self.log.debug('%s:/etc/ipa/ca.crt contents:\n%s',
                           host, remote_cacrt)
            binary_cacrt = base64.b64decode(x509.strip_header(remote_cacrt))
            self.log.debug('%s: Decoded /etc/ipa/ca.crt:\n%r',
                           host, binary_cacrt)
            assert expected_binary_cacrt == binary_cacrt


class TestServerInstall(CALessBase):
    num_replicas = 0

    def tearDown(self):
        self.uninstall_server()

        # Remove CA cert in /etc/pki/nssdb, in case of failed (un)install;
        # run on each host, not only the master
        for host in self.get_all_hosts():
            host.run_command(['certutil', '-d', paths.NSS_DB_DIR, '-D',
                              '-n', 'External CA cert'],
                             raiseonerr=False)

    def test_nonexistent_ca_pem_file(self):
        "IPA server install with non-existent CA PEM file"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca2'))

        result = self.install_server(root_ca_file='does_not_exist')
        assert_error(result, 'Failed to open does_not_exist: No such file '
                             'or directory')

    def test_unknown_ca(self):
        "IPA server install with CA PEM file with unknown CA certificate"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca2'))

        result = self.install_server()
        assert_error(result,
                     'server.p12 is not signed by root.pem, or the full '
                     'certificate chain is not present in the PKCS#12 '
                     'file')

    def test_ca_server_cert(self):
        "IPA server install with CA PEM file with server certificate"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1/server'))

        result = self.install_server()
        assert_error(result,
                     'trust chain of the server certificate in server.p12 '
                     'contains 1 certificates, expected 2')

    def test_ca_2_certs(self):
        "IPA server install with CA PEM file with 2 certificates"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1'))
            f.write(self.get_pem('ca2'))

        result = self.install_server()
        assert_error(result, 'root.pem contains more than one certificate')

    def test_nonexistent_http_pkcs12_file(self):
        "IPA server install with non-existent HTTP PKCS#12 file"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1'))

        result = self.install_server(http_pkcs12='does_not_exist',
                                     http_pkcs12_exists=False)
        assert_error(result, 'Failed to open does_not_exist')

    def test_nonexistent_ds_pkcs12_file(self):
        "IPA server install with non-existent DS PKCS#12 file"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1'))

        result = self.install_server(dirsrv_pkcs12='does_not_exist',
                                     dirsrv_pkcs12_exists=False)
        assert_error(result, 'Failed to open does_not_exist')

    def test_missing_http_password(self):
        "IPA server install with missing HTTP PKCS#12 password (unattended)"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1'))

        result = self.install_server(http_pin=None)
        assert_error(result,
                     'ipa-server-install: error: You must specify --http-pin '
                     'with --http-cert-file')

    def test_missing_ds_password(self):
        "IPA server install with missing DS PKCS#12 password (unattended)"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1'))

        result = self.install_server(dirsrv_pin=None)
assert_error(result, 'ipa-server-install: error: You must specify ' '--dirsrv-pin with --dirsrv-cert-file') def test_incorect_http_pin(self): "IPA server install with incorrect HTTP PKCS#12 password" self.export_pkcs12('ca1/server') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pin='bad<pin>') assert_error(result, 'incorrect password for pkcs#12 file server.p12') def test_incorect_ds_pin(self): "IPA server install with incorrect DS PKCS#12 password" self.export_pkcs12('ca1/server') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(dirsrv_pin='bad<pin>') assert_error(result, 'incorrect password for pkcs#12 file server.p12') def test_invalid_http_cn(self): "IPA server install with HTTP certificate with invalid CN" self.export_pkcs12('ca1/server-badname', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in http.p12 is not valid: ' 'invalid for server %s' % self.master.hostname) def test_invalid_ds_cn(self): "IPA server install with DS certificate with invalid CN" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/server-badname', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in dirsrv.p12 is not valid: ' 'invalid for server %s' % self.master.hostname) def test_expired_http(self): "IPA server install with expired HTTP certificate" self.export_pkcs12('ca1/server-expired', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in http.p12 is not valid: ' "(SEC_ERROR_EXPIRED_CERTIFICATE) Peer's Certificate has " 'expired.') def test_expired_ds(self): "IPA server install with expired DS certificate" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/server-expired', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in dirsrv.p12 is not valid: ' "(SEC_ERROR_EXPIRED_CERTIFICATE) Peer's Certificate has " 'expired.') def test_http_bad_usage(self): "IPA server install with HTTP certificate with invalid key usage" self.export_pkcs12('ca1/server-badusage', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in http.p12 is not valid: ' 'invalid for a SSL server') def test_ds_bad_usage(self): "IPA server install with DS certificate with invalid key usage" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/server-badusage', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in 
dirsrv.p12 is not valid: ' 'invalid for a SSL server') def test_revoked_http(self): "IPA server install with revoked HTTP certificate" self.export_pkcs12('ca1/server-revoked', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') if result.returncode == 0: raise nose.SkipTest( "Known CA-less installation defect, see " + "https://fedorahosted.org/freeipa/ticket/4270") assert result.returncode > 0 def test_revoked_ds(self): "IPA server install with revoked DS certificate" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/server-revoked', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') if result.returncode == 0: raise nose.SkipTest( "Known CA-less installation defect, see " + "https://fedorahosted.org/freeipa/ticket/4270") assert result.returncode > 0 def test_http_intermediate_ca(self): "IPA server install with HTTP certificate issued by intermediate CA" self.export_pkcs12('ca1/subca/server', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'http.p12 is not signed by root.pem, or the full ' 'certificate chain is not present in the PKCS#12 file') def test_ds_intermediate_ca(self): "IPA server install with DS certificate issued by intermediate CA" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/subca/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'dirsrv.p12 is not signed by root.pem, or the full ' 'certificate chain is not present in the PKCS#12 file') def test_ca_self_signed(self): "IPA server install with self-signed certificate" self.export_pkcs12('server-selfsign') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('server-selfsign')) result = self.install_server() assert result.returncode > 0 def test_valid_certs(self): "IPA server install with valid certificates" self.export_pkcs12('ca1/server') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server() assert result.returncode == 0 self.verify_installation() def test_wildcard_http(self): "IPA server install with wildcard HTTP certificate" self.export_pkcs12('ca1/wildcard', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert result.returncode == 0 self.verify_installation() def test_wildcard_ds(self): "IPA server install with wildcard DS certificate" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/wildcard', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert result.returncode == 0 self.verify_installation() def test_http_san(self): "IPA server install with HTTP certificate with SAN" self.export_pkcs12('ca1/server-altname', filename='http.p12') 
self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert result.returncode == 0 self.verify_installation() def test_ds_san(self): "IPA server install with DS certificate with SAN" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/server-altname', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert result.returncode == 0 self.verify_installation() def test_interactive_missing_http_pkcs_password(self): "IPA server install with prompt for HTTP PKCS#12 password" self.export_pkcs12('ca1/server') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) stdin_text = get_install_stdin(cert_passwords=[self.cert_password]) result = self.install_server(http_pin=None, unattended=False, stdin_text=stdin_text) assert result.returncode == 0 self.verify_installation() assert ('Enter server.p12 unlock password:' in result.stdout_text), result.stdout_text def test_interactive_missing_ds_pkcs_password(self): "IPA server install with prompt for DS PKCS#12 password" self.export_pkcs12('ca1/server') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) stdin_text = get_install_stdin(cert_passwords=[self.cert_password]) result = self.install_server(dirsrv_pin=None, unattended=False, stdin_text=stdin_text) assert result.returncode == 0 self.verify_installation() assert ('Enter server.p12 unlock password:' in result.stdout_text), result.stdout_text def test_no_http_password(self): "IPA server install with empty HTTP password" self.export_pkcs12('ca1/server', filename='http.p12', password='') self.export_pkcs12('ca1/server', filename='dirsrv.p12') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12', http_pin='') assert result.returncode == 0 self.verify_installation() def test_no_ds_password(self): "IPA server install with empty DS password" self.export_pkcs12('ca1/server', filename='http.p12') self.export_pkcs12('ca1/server', filename='dirsrv.p12', password='') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12', dirsrv_pin='') assert result.returncode == 0 self.verify_installation() class TestReplicaInstall(CALessBase): num_replicas = 1 def setUp(self): # Install the master for every test self.export_pkcs12('ca1/server') with open(self.pem_filename, 'w') as f: f.write(self.get_pem('ca1')) result = self.install_server() assert result.returncode == 0 def tearDown(self): # Uninstall both master and replica replica = self.replicas[0] tasks.kinit_admin(self.master) self.uninstall_server(replica) self.master.run_command(['ipa-replica-manage', 'del', replica.hostname, '--force'], raiseonerr=False) self.master.run_command(['ipa', 'host-del', replica.hostname], raiseonerr=False) replica.run_command(['certutil', '-d', paths.NSS_DB_DIR, '-D', '-n', 'External CA cert'], raiseonerr=False) self.uninstall_server() self.master.run_command(['certutil', '-d', paths.NSS_DB_DIR, '-D', '-n', 'External CA cert'], raiseonerr=False) def test_no_certs(self): "IPA replica install without certificates" result = self.master.run_command(['ipa-replica-prepare', self.replicas[0].hostname], raiseonerr=False) assert 
result.returncode > 0 assert ('Cannot issue certificates: a CA is not installed. Use the ' '--http-cert-file, --dirsrv-cert-file options to provide ' 'custom certificates.' in result.stderr_text), \ result.stderr_text def test_nonexistent_http_pkcs12_file(self): "IPA replica install with non-existent HTTP PKCS#12 file" self.export_pkcs12('ca1/replica', filename='dirsrv.p12') result = self.prepare_replica(http_pkcs12='does_not_exist', dirsrv_pkcs12='dirsrv.p12', http_pkcs12_exists=False) assert_error(result, 'Failed to open does_not_exist') def test_nonexistent_ds_pkcs12_file(self): "IPA replica install with non-existent DS PKCS#12 file" self.export_pkcs12('ca1/replica', filename='http.p12') result = self.prepare_replica(dirsrv_pkcs12='does_not_exist', http_pkcs12='http.p12', dirsrv_pkcs12_exists=False) assert_error(result, 'Failed to open does_not_exist') def test_incorect_http_pin(self): "IPA replica install with incorrect HTTP PKCS#12 password" self.export_pkcs12('ca1/replica', filename='replica.p12') result = self.prepare_replica(http_pin='bad<pin>') assert result.returncode > 0 assert_error(result, 'incorrect password for pkcs#12 file replica.p12') def test_incorect_ds_pin(self): "IPA replica install with incorrect DS PKCS#12 password" self.export_pkcs12('ca1/replica', filename='replica.p12') result = self.prepare_replica(dirsrv_pin='bad<pin>') assert_error(result, 'incorrect password for pkcs#12 file replica.p12') def test_http_unknown_ca(self): "IPA replica install with HTTP certificate issued by unknown CA" self.export_pkcs12('ca2/replica', filename='http.p12') self.export_pkcs12('ca1/replica', filename='dirsrv.p12') result = self.prepare_replica(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'http.p12 is not signed by /etc/ipa/ca.crt, or the full ' 'certificate chain is not present in the PKCS#12 file') def test_ds_unknown_ca(self): "IPA replica install with DS certificate issued by unknown CA" self.export_pkcs12('ca1/replica', filename='http.p12') self.export_pkcs12('ca2/replica', filename='dirsrv.p12') result = self.prepare_replica(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'dirsrv.p12 is not signed by /etc/ipa/ca.crt, or the ' 'full certificate chain is not present in the PKCS#12 ' 'file') def test_invalid_http_cn(self): "IPA replica install with HTTP certificate with invalid CN" self.export_pkcs12('ca1/replica-badname', filename='http.p12') self.export_pkcs12('ca1/replica', filename='dirsrv.p12') result = self.prepare_replica(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in http.p12 is not valid: ' 'invalid for server %s' % self.replicas[0].hostname) def test_invalid_ds_cn(self): "IPA replica install with DS certificate with invalid CN" self.export_pkcs12('ca1/replica', filename='http.p12') self.export_pkcs12('ca1/replica-badname', filename='dirsrv.p12') result = self.prepare_replica(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in dirsrv.p12 is not valid: ' 'invalid for server %s' % self.replicas[0].hostname) def test_expired_http(self): "IPA replica install with expired HTTP certificate" self.export_pkcs12('ca1/replica-expired', filename='http.p12') self.export_pkcs12('ca1/replica', filename='dirsrv.p12') result = self.prepare_replica(http_pkcs12='http.p12', dirsrv_pkcs12='dirsrv.p12') assert_error(result, 'The server certificate in http.p12 is not valid: ' "(SEC_ERROR_EXPIRED_CERTIFICATE) Peer's Certificate has " 
                     'expired.')

    def test_expired_ds(self):
        "IPA replica install with expired DS certificate"

        # the expired certificate goes into dirsrv.p12 here (the original
        # duplicated the HTTP variant of this test)
        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/replica-expired', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert_error(result,
                     'The server certificate in dirsrv.p12 is not valid: '
                     "(SEC_ERROR_EXPIRED_CERTIFICATE) Peer's Certificate has "
                     'expired.')

    def test_http_bad_usage(self):
        "IPA replica install with HTTP certificate with invalid key usage"

        self.export_pkcs12('ca1/replica-badusage', filename='http.p12')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert_error(result,
                     'The server certificate in http.p12 is not valid: '
                     'invalid for a SSL server')

    def test_ds_bad_usage(self):
        "IPA replica install with DS certificate with invalid key usage"

        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/replica-badusage', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert_error(result,
                     'The server certificate in dirsrv.p12 is not valid: '
                     'invalid for a SSL server')

    def test_revoked_http(self):
        "IPA replica install with revoked HTTP certificate"

        self.export_pkcs12('ca1/replica-revoked', filename='http.p12')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')

        if result.returncode == 0:
            raise nose.SkipTest(
                "Known CA-less installation defect, see "
                "https://fedorahosted.org/freeipa/ticket/4270")

        assert result.returncode > 0

    def test_revoked_ds(self):
        "IPA replica install with revoked DS certificate"

        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/replica-revoked', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')

        if result.returncode == 0:
            raise nose.SkipTest(
                "Known CA-less installation defect, see "
                "https://fedorahosted.org/freeipa/ticket/4270")

        assert result.returncode > 0

    def test_http_intermediate_ca(self):
        "IPA replica install with HTTP certificate issued by intermediate CA"

        self.export_pkcs12('ca1/subca/replica', filename='http.p12')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert_error(result,
                     'http.p12 is not signed by /etc/ipa/ca.crt, or the full '
                     'certificate chain is not present in the PKCS#12 file')

    def test_ds_intermediate_ca(self):
        "IPA replica install with DS certificate issued by intermediate CA"

        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/subca/replica', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert_error(result,
                     'dirsrv.p12 is not signed by /etc/ipa/ca.crt, or the '
                     'full certificate chain is not present in the PKCS#12 '
                     'file')

    def test_valid_certs(self):
        "IPA replica install with valid certificates"

        self.export_pkcs12('ca1/replica', filename='server.p12')

        result = self.prepare_replica(http_pkcs12='server.p12',
                                      dirsrv_pkcs12='server.p12')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_wildcard_http(self):
        "IPA replica install with wildcard HTTP certificate"

        self.export_pkcs12('ca1/wildcard', filename='http.p12')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12')
        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_wildcard_ds(self):
        "IPA replica install with wildcard DS certificate"

        # the wildcard certificate goes into dirsrv.p12 here (the original
        # duplicated the HTTP variant of this test)
        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/wildcard', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_http_san(self):
        "IPA replica install with HTTP certificate with SAN"

        self.export_pkcs12('ca1/replica-altname', filename='http.p12')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_ds_san(self):
        "IPA replica install with DS certificate with SAN"

        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/replica-altname', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_interactive_missing_http_pkcs_password(self):
        "IPA replica install with missing HTTP PKCS#12 password"

        self.export_pkcs12('ca1/replica', filename='replica.p12')

        stdin_text = get_replica_prepare_stdin(
            cert_passwords=[self.cert_password])

        result = self.prepare_replica(http_pin=None, unattended=False,
                                      stdin_text=stdin_text)
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_interactive_missing_ds_pkcs_password(self):
        "IPA replica install with missing DS PKCS#12 password"

        self.export_pkcs12('ca1/replica', filename='replica.p12')

        stdin_text = get_replica_prepare_stdin(
            cert_passwords=[self.cert_password])

        result = self.prepare_replica(dirsrv_pin=None, unattended=False,
                                      stdin_text=stdin_text)
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_no_http_password(self):
        "IPA replica install with empty HTTP password"

        self.export_pkcs12('ca1/replica', filename='http.p12', password='')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12',
                                      http_pin='')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()

    def test_no_ds_password(self):
        "IPA replica install with empty DS password"

        self.export_pkcs12('ca1/replica', filename='http.p12')
        self.export_pkcs12('ca1/replica', filename='dirsrv.p12', password='')

        result = self.prepare_replica(http_pkcs12='http.p12',
                                      dirsrv_pkcs12='dirsrv.p12',
                                      dirsrv_pin='')
        assert result.returncode == 0

        result = self.install_replica()
        assert result.returncode == 0

        self.verify_installation()


class TestClientInstall(CALessBase):
    num_clients = 1

    def test_client_install(self):
        "IPA client install"

        self.export_pkcs12('ca1/server')
        with open(self.pem_filename, 'w') as f:
            f.write(self.get_pem('ca1'))

        result = self.install_server()
        assert result.returncode == 0

        self.clients[0].run_command(['ipa-client-install',
                                     '--domain', self.master.domain.name,
                                     '--server', self.master.hostname,
                                     '-p', self.master.config.admin_name,
                                     '-w',
                                     self.master.config.admin_password,
                                     '-U'])

        self.verify_installation()


class TestIPACommands(CALessBase):
    @classmethod
    def install(cls, mh):
        super(TestIPACommands, cls).install(mh)

        cls.export_pkcs12('ca1/server')
        with open(cls.pem_filename, 'w') as f:
            f.write(cls.get_pem('ca1'))

        result = cls.install_server()
        assert result.returncode == 0

        tasks.kinit_admin(cls.master)

        cls.client_pem = ''.join(
            cls.get_pem('ca1/client').splitlines()[1:-1])
        cls.log.debug('Client PEM:\n%r', cls.client_pem)
        cls.test_hostname = 'testhost.%s' % cls.master.domain.name
        cls.test_service = 'test/%s' % cls.test_hostname

    def check_ipa_command_not_available(self, command):
        "Verify that the given IPA subcommand is not available"

        result = self.master.run_command(['ipa', command], raiseonerr=False)
        assert_error(result, "ipa: ERROR: unknown command '%s'" % command)

    @pytest.mark.parametrize('command', (
        'cert-status', 'cert-show', 'cert-find', 'cert-revoke',
        'cert-remove-hold'))
    def test_cert_commands_unavailable(self, command):
        self.check_ipa_command_not_available(command)

    def test_cert_help_unavailable(self):
        "Verify that cert plugin help is not available"
        result = self.master.run_command(['ipa', 'help', 'cert'],
                                         raiseonerr=False)
        assert_error(result,
                     "ipa: ERROR: no command nor help topic 'cert'",
                     returncode=1)

    @contextlib.contextmanager
    def host(self):
        "Context manager that adds and removes a host entry with a certificate"
        self.master.run_command(['ipa', 'host-add', self.test_hostname,
                                 '--force', '--certificate',
                                 self.client_pem])
        try:
            yield
        finally:
            self.master.run_command(['ipa', 'host-del', self.test_hostname],
                                    raiseonerr=False)

    @contextlib.contextmanager
    def service(self):
        "Context manager that adds and removes host & service entries"
        with self.host():
            self.master.run_command(['ipa', 'service-add', self.test_service,
                                     '--force', '--certificate',
                                     self.client_pem])
            yield

    def test_service_mod_doesnt_revoke(self):
        "Verify that service-mod does not attempt to revoke certificate"
        with self.service():
            self.master.run_command(['ipa', 'service-mod', self.test_service,
                                     '--certificate='])

    def test_service_disable_doesnt_revoke(self):
        "Verify that service-disable does not attempt to revoke certificate"
        with self.service():
            self.master.run_command(['ipa', 'service-disable',
                                     self.test_service])

    def test_service_del_doesnt_revoke(self):
        "Verify that service-del does not attempt to revoke certificate"
        with self.service():
            self.master.run_command(['ipa', 'service-del',
                                     self.test_service])

    def test_host_mod_doesnt_revoke(self):
        "Verify that host-mod does not attempt to revoke host's certificate"
        with self.host():
            self.master.run_command(['ipa', 'host-mod', self.test_hostname,
                                     '--certificate='])

    def test_host_disable_doesnt_revoke(self):
        "Verify that host-disable does not attempt to revoke host certificate"
        with self.host():
            self.master.run_command(['ipa', 'host-disable',
                                     self.test_hostname])

    def test_host_del_doesnt_revoke(self):
        "Verify that host-del does not attempt to revoke host's certificate"
        with self.host():
            self.master.run_command(['ipa', 'host-del', self.test_hostname])


class TestCertinstall(CALessBase):
    @classmethod
    def install(cls, mh):
        super(TestCertinstall, cls).install(mh)

        cls.export_pkcs12('ca1/server')
        with open(cls.pem_filename, 'w') as f:
            f.write(cls.get_pem('ca1'))

        result = cls.install_server()
        assert result.returncode == 0

        tasks.kinit_admin(cls.master)

    def certinstall(self, mode, cert_nick=None, cert_exists=True,
filename='server.p12', pin=_DEFAULT, stdin_text=None, p12_pin=None, args=None): if cert_nick: self.export_pkcs12(cert_nick, password=p12_pin) if pin is _DEFAULT: pin = self.cert_password if cert_exists: self.copy_cert(self.master, filename) if not args: args = ['ipa-server-certinstall', '-%s' % mode, filename] if pin is not None: args += ['--pin', pin] if mode == 'd': args += ['--dirman-password', self.master.config.dirman_password] return self.master.run_command(args, raiseonerr=False, stdin_text=stdin_text) def test_nonexistent_http_pkcs12_file(self): "Install new HTTP certificate from non-existent PKCS#12 file" result = self.certinstall('w', filename='does_not_exist', pin='none', cert_exists=False) assert_error(result, 'Failed to open does_not_exist') def test_nonexistent_ds_pkcs12_file(self): "Install new DS certificate from non-existent PKCS#12 file" result = self.certinstall('d', filename='does_not_exist', pin='none', cert_exists=False) assert_error(result, 'Failed to open does_not_exist') def test_incorect_http_pin(self): "Install new HTTP certificate with incorrect PKCS#12 password" result = self.certinstall('w', 'ca1/server', pin='bad<pin>') assert_error(result, 'incorrect password for pkcs#12 file server.p12') def test_incorect_dirsrv_pin(self): "Install new DS certificate with incorrect PKCS#12 password" result = self.certinstall('d', 'ca1/server', pin='bad<pin>') assert_error(result, 'incorrect password for pkcs#12 file server.p12') def test_invalid_http_cn(self): "Install new HTTP certificate with invalid CN " result = self.certinstall('w', 'ca1/server-badname') assert_error(result, 'The server certificate in server.p12 is not valid: ' 'invalid for server %s' % self.master.hostname) def test_invalid_ds_cn(self): "Install new DS certificate with invalid CN " result = self.certinstall('d', 'ca1/server-badname') assert_error(result, 'The server certificate in server.p12 is not valid: ' 'invalid for server %s' % self.master.hostname) def test_expired_http(self): "Install new expired HTTP certificate" result = self.certinstall('w', 'ca1/server-expired') assert_error(result, 'The server certificate in server.p12 is not valid: ' "(SEC_ERROR_EXPIRED_CERTIFICATE) Peer's Certificate has " 'expired.') def test_expired_ds(self): "Install new expired DS certificate" result = self.certinstall('d', 'ca1/server-expired') assert_error(result, 'The server certificate in server.p12 is not valid: ' "(SEC_ERROR_EXPIRED_CERTIFICATE) Peer's Certificate has " 'expired.') def test_http_bad_usage(self): "Install new HTTP certificate with invalid key usage" result = self.certinstall('w', 'ca1/server-badusage') assert_error(result, 'The server certificate in server.p12 is not valid: ' 'invalid for a SSL server') def test_ds_bad_usage(self): "Install new DS certificate with invalid key usage" result = self.certinstall('d', 'ca1/server-badusage') assert_error(result, 'The server certificate in server.p12 is not valid: ' 'invalid for a SSL server') def test_revoked_http(self): "Install new revoked HTTP certificate" result = self.certinstall('w', 'ca1/server-revoked') if result.returncode == 0: raise nose.SkipTest( "Known CA-less installation defect, see " + "https://fedorahosted.org/freeipa/ticket/4270") assert result.returncode > 0 def test_revoked_ds(self): "Install new revoked DS certificate" result = self.certinstall('d', 'ca1/server-revoked') if result.returncode == 0: raise nose.SkipTest( "Known CA-less installation defect, see " + "https://fedorahosted.org/freeipa/ticket/4270") assert 
result.returncode > 0 def test_http_intermediate_ca(self): "Install new HTTP certificate issued by intermediate CA" result = self.certinstall('w', 'ca1/subca/server') assert_error(result, 'server.p12 is not signed by /etc/ipa/ca.crt, or the ' 'full certificate chain is not present in the PKCS#12 ' 'file') def test_ds_intermediate_ca(self): "Install new DS certificate issued by intermediate CA" result = self.certinstall('d', 'ca1/subca/server') assert_error(result, 'server.p12 is not signed by /etc/ipa/ca.crt, or the ' 'full certificate chain is not present in the PKCS#12 ' 'file') def test_self_signed(self): "Install new self-signed certificate" result = self.certinstall('w', 'server-selfsign') assert_error(result, 'server.p12 is not signed by /etc/ipa/ca.crt, or the ' 'full certificate chain is not present in the PKCS#12 ' 'file') def test_valid_http(self): "Install new valid HTTP certificate" result = self.certinstall('w', 'ca1/server') assert result.returncode == 0 def test_valid_ds(self): "Install new valid DS certificate" result = self.certinstall('d', 'ca1/server') assert result.returncode == 0 def test_wildcard_http(self): "Install new wildcard HTTP certificate" result = self.certinstall('w', 'ca1/wildcard') assert result.returncode == 0 def test_wildcard_ds(self): "Install new wildcard DS certificate" result = self.certinstall('d', 'ca1/wildcard') assert result.returncode == 0 def test_http_san(self): "Install new HTTP certificate with SAN" result = self.certinstall('w', 'ca1/server-altname') assert result.returncode == 0 def test_ds_san(self): "Install new DS certificate with SAN" result = self.certinstall('d', 'ca1/server-altname') assert result.returncode == 0 def test_interactive_missing_http_pkcs_password(self): "Install new HTTP certificate with missing PKCS#12 password" result = self.certinstall('w', 'ca1/server', pin=None, stdin_text=self.cert_password + '\n') assert result.returncode == 0 def test_interactive_missing_ds_pkcs_password(self): "Install new DS certificate with missing PKCS#12 password" result = self.certinstall('d', 'ca1/server', pin=None, stdin_text=self.cert_password + '\n') assert result.returncode == 0 def test_no_http_password(self): "Install new HTTP certificate with no PKCS#12 password" result = self.certinstall('w', 'ca1/server', pin='', p12_pin='') assert result.returncode == 0 def test_no_ds_password(self): "Install new DS certificate with no PKCS#12 password" result = self.certinstall('d', 'ca1/server', pin='', p12_pin='') assert result.returncode == 0 def test_http_old_options(self): "Install new valid DS certificate using pre-v3.3 CLI options" # http://www.freeipa.org/page/V3/ipa-server-certinstall_CLI_cleanup args = ['ipa-server-certinstall', '-w', 'server.p12', '--http-pin', self.cert_password] result = self.certinstall('w', 'ca1/server', args=args) assert result.returncode == 0 def test_ds_old_options(self): "Install new valid DS certificate using pre-v3.3 CLI options" # http://www.freeipa.org/page/V3/ipa-server-certinstall_CLI_cleanup args = ['ipa-server-certinstall', '-d', 'server.p12', '--dirsrv-pin', self.cert_password] stdin_text = self.master.config.dirman_password + '\n' result = self.certinstall('d', 'ca1/server', args=args, stdin_text=stdin_text) assert result.returncode == 0
msimacek/freeipa
ipatests/test_integration/test_caless.py
ipalib/plugins/batch.py
from typing import Any import numpy as np import numpy.typing as npt AR_f8: npt.NDArray[np.float64] = np.array([1.0]) AR_i4 = np.array([1], dtype=np.int32) AR_u1 = np.array([1], dtype=np.uint8) AR_LIKE_f = [1.5] AR_LIKE_i = [1] b_f8 = np.broadcast(AR_f8) b_i4_f8_f8 = np.broadcast(AR_i4, AR_f8, AR_f8) next(b_f8) b_f8.reset() b_f8.index b_f8.iters b_f8.nd b_f8.ndim b_f8.numiter b_f8.shape b_f8.size next(b_i4_f8_f8) b_i4_f8_f8.reset() b_i4_f8_f8.ndim b_i4_f8_f8.index b_i4_f8_f8.iters b_i4_f8_f8.nd b_i4_f8_f8.numiter b_i4_f8_f8.shape b_i4_f8_f8.size np.inner(AR_f8, AR_i4) np.where([True, True, False]) np.where([True, True, False], 1, 0) np.lexsort([0, 1, 2]) np.can_cast(np.dtype("i8"), int) np.can_cast(AR_f8, "f8") np.can_cast(AR_f8, np.complex128, casting="unsafe") np.min_scalar_type([1]) np.min_scalar_type(AR_f8) np.result_type(int, AR_i4) np.result_type(AR_f8, AR_u1) np.result_type(AR_f8, np.complex128) np.dot(AR_LIKE_f, AR_i4) np.dot(AR_u1, 1) np.dot(1.5j, 1) np.dot(AR_u1, 1, out=AR_f8) np.vdot(AR_LIKE_f, AR_i4) np.vdot(AR_u1, 1) np.vdot(1.5j, 1) np.bincount(AR_i4) np.copyto(AR_f8, [1.6]) np.putmask(AR_f8, [True], 1.5) np.packbits(AR_i4) np.packbits(AR_u1) np.unpackbits(AR_u1) np.shares_memory(1, 2) np.shares_memory(AR_f8, AR_f8, max_work=1) np.may_share_memory(1, 2) np.may_share_memory(AR_f8, AR_f8, max_work=1)
""" Test the scalar constructors, which also do type-coercion """ import pytest import numpy as np from numpy.testing import ( assert_equal, assert_almost_equal, assert_warns, ) class TestFromString: def test_floating(self): # Ticket #640, floats from string fsingle = np.single('1.234') fdouble = np.double('1.234') flongdouble = np.longdouble('1.234') assert_almost_equal(fsingle, 1.234) assert_almost_equal(fdouble, 1.234) assert_almost_equal(flongdouble, 1.234) def test_floating_overflow(self): """ Strings containing an unrepresentable float overflow """ fhalf = np.half('1e10000') assert_equal(fhalf, np.inf) fsingle = np.single('1e10000') assert_equal(fsingle, np.inf) fdouble = np.double('1e10000') assert_equal(fdouble, np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '1e10000') assert_equal(flongdouble, np.inf) fhalf = np.half('-1e10000') assert_equal(fhalf, -np.inf) fsingle = np.single('-1e10000') assert_equal(fsingle, -np.inf) fdouble = np.double('-1e10000') assert_equal(fdouble, -np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '-1e10000') assert_equal(flongdouble, -np.inf) class TestExtraArgs: def test_superclass(self): # try both positional and keyword arguments s = np.str_(b'\\x61', encoding='unicode-escape') assert s == 'a' s = np.str_(b'\\x61', 'unicode-escape') assert s == 'a' # previously this would return '\\xx' with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', encoding='unicode-escape') with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', 'unicode-escape') # superclass fails, but numpy succeeds assert np.bytes_(-2) == b'-2' def test_datetime(self): dt = np.datetime64('2000-01', ('M', 2)) assert np.datetime_data(dt) == ('M', 2) with pytest.raises(TypeError): np.datetime64('2000', garbage=True) def test_bool(self): with pytest.raises(TypeError): np.bool_(False, garbage=True) def test_void(self): with pytest.raises(TypeError): np.void(b'test', garbage=True) class TestFromInt: def test_intp(self): # Ticket #99 assert_equal(1024, np.intp(1024)) def test_uint64_from_negative(self): assert_equal(np.uint64(-2), np.uint64(18446744073709551614)) int_types = [np.byte, np.short, np.intc, np.int_, np.longlong] uint_types = [np.ubyte, np.ushort, np.uintc, np.uint, np.ulonglong] float_types = [np.half, np.single, np.double, np.longdouble] cfloat_types = [np.csingle, np.cdouble, np.clongdouble] class TestArrayFromScalar: """ gh-15467 """ def _do_test(self, t1, t2): x = t1(2) arr = np.array(x, dtype=t2) # type should be preserved exactly if t2 is None: assert arr.dtype.type is t1 else: assert arr.dtype.type is t2 @pytest.mark.parametrize('t1', int_types + uint_types) @pytest.mark.parametrize('t2', int_types + uint_types + [None]) def test_integers(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', float_types) @pytest.mark.parametrize('t2', float_types + [None]) def test_reals(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', cfloat_types) @pytest.mark.parametrize('t2', cfloat_types + [None]) def test_complex(self, t1, t2): return self._do_test(t1, t2)
numpy/numpy
numpy/core/tests/test_scalar_ctors.py
numpy/typing/tests/data/pass/multiarray.py
from numpy.testing import assert_raises, assert_warns, assert_, assert_equal
from numpy.compat import pickle

import sys
import subprocess
import textwrap
from importlib import reload


def test_numpy_reloading():
    # gh-7844. Also check that relevant globals retain their identity.
    import numpy as np
    import numpy._globals

    _NoValue = np._NoValue
    VisibleDeprecationWarning = np.VisibleDeprecationWarning
    ModuleDeprecationWarning = np.ModuleDeprecationWarning

    with assert_warns(UserWarning):
        reload(np)
    assert_(_NoValue is np._NoValue)
    assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
    assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)

    assert_raises(RuntimeError, reload, numpy._globals)
    with assert_warns(UserWarning):
        reload(np)
    assert_(_NoValue is np._NoValue)
    assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
    assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)


def test_novalue():
    import numpy as np
    for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
        assert_equal(repr(np._NoValue), '<no value>')
        assert_(pickle.loads(pickle.dumps(np._NoValue,
                                          protocol=proto)) is np._NoValue)


def test_full_reimport():
    """At the time of writing this, it is *not* truly supported, but
    apparently enough users rely on it, for it to be an annoying change
    when it started failing previously.
    """
    # Test within a new process, to ensure that we do not mess with the
    # global state during the test run (could lead to cryptic test
    # failures). This is generally unsafe, especially, since we also
    # reload the C-modules.
    code = textwrap.dedent(r"""
        import sys
        from pytest import warns
        import numpy as np

        for k in list(sys.modules.keys()):
            if "numpy" in k:
                del sys.modules[k]

        with warns(UserWarning):
            import numpy as np
        """)
    p = subprocess.run([sys.executable, '-c', code])
    # The reimport only counts as working if the subprocess exits cleanly.
    assert p.returncode == 0
""" Test the scalar constructors, which also do type-coercion """ import pytest import numpy as np from numpy.testing import ( assert_equal, assert_almost_equal, assert_warns, ) class TestFromString: def test_floating(self): # Ticket #640, floats from string fsingle = np.single('1.234') fdouble = np.double('1.234') flongdouble = np.longdouble('1.234') assert_almost_equal(fsingle, 1.234) assert_almost_equal(fdouble, 1.234) assert_almost_equal(flongdouble, 1.234) def test_floating_overflow(self): """ Strings containing an unrepresentable float overflow """ fhalf = np.half('1e10000') assert_equal(fhalf, np.inf) fsingle = np.single('1e10000') assert_equal(fsingle, np.inf) fdouble = np.double('1e10000') assert_equal(fdouble, np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '1e10000') assert_equal(flongdouble, np.inf) fhalf = np.half('-1e10000') assert_equal(fhalf, -np.inf) fsingle = np.single('-1e10000') assert_equal(fsingle, -np.inf) fdouble = np.double('-1e10000') assert_equal(fdouble, -np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '-1e10000') assert_equal(flongdouble, -np.inf) class TestExtraArgs: def test_superclass(self): # try both positional and keyword arguments s = np.str_(b'\\x61', encoding='unicode-escape') assert s == 'a' s = np.str_(b'\\x61', 'unicode-escape') assert s == 'a' # previously this would return '\\xx' with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', encoding='unicode-escape') with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', 'unicode-escape') # superclass fails, but numpy succeeds assert np.bytes_(-2) == b'-2' def test_datetime(self): dt = np.datetime64('2000-01', ('M', 2)) assert np.datetime_data(dt) == ('M', 2) with pytest.raises(TypeError): np.datetime64('2000', garbage=True) def test_bool(self): with pytest.raises(TypeError): np.bool_(False, garbage=True) def test_void(self): with pytest.raises(TypeError): np.void(b'test', garbage=True) class TestFromInt: def test_intp(self): # Ticket #99 assert_equal(1024, np.intp(1024)) def test_uint64_from_negative(self): assert_equal(np.uint64(-2), np.uint64(18446744073709551614)) int_types = [np.byte, np.short, np.intc, np.int_, np.longlong] uint_types = [np.ubyte, np.ushort, np.uintc, np.uint, np.ulonglong] float_types = [np.half, np.single, np.double, np.longdouble] cfloat_types = [np.csingle, np.cdouble, np.clongdouble] class TestArrayFromScalar: """ gh-15467 """ def _do_test(self, t1, t2): x = t1(2) arr = np.array(x, dtype=t2) # type should be preserved exactly if t2 is None: assert arr.dtype.type is t1 else: assert arr.dtype.type is t2 @pytest.mark.parametrize('t1', int_types + uint_types) @pytest.mark.parametrize('t2', int_types + uint_types + [None]) def test_integers(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', float_types) @pytest.mark.parametrize('t2', float_types + [None]) def test_reals(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', cfloat_types) @pytest.mark.parametrize('t2', cfloat_types + [None]) def test_complex(self, t1, t2): return self._do_test(t1, t2)
numpy/numpy
numpy/core/tests/test_scalar_ctors.py
numpy/tests/test_reloading.py
"""This module implements additional tests ala autoconf which can be useful. """ import textwrap # We put them here since they could be easily reused outside numpy.distutils def check_inline(cmd): """Return the inline identifier (may be empty).""" cmd._check_compiler() body = textwrap.dedent(""" #ifndef __cplusplus static %(inline)s int static_func (void) { return 0; } %(inline)s int nostatic_func (void) { return 0; } #endif""") for kw in ['inline', '__inline__', '__inline']: st = cmd.try_compile(body % {'inline': kw}, None, None) if st: return kw return '' def check_restrict(cmd): """Return the restrict identifier (may be empty).""" cmd._check_compiler() body = textwrap.dedent(""" static int static_func (char * %(restrict)s a) { return 0; } """) for kw in ['restrict', '__restrict__', '__restrict']: st = cmd.try_compile(body % {'restrict': kw}, None, None) if st: return kw return '' def check_compiler_gcc(cmd): """Check if the compiler is GCC.""" cmd._check_compiler() body = textwrap.dedent(""" int main() { #if (! defined __GNUC__) #error gcc required #endif return 0; } """) return cmd.try_compile(body, None, None) def check_gcc_version_at_least(cmd, major, minor=0, patchlevel=0): """ Check that the gcc version is at least the specified version.""" cmd._check_compiler() version = '.'.join([str(major), str(minor), str(patchlevel)]) body = textwrap.dedent(""" int main() { #if (! defined __GNUC__) || (__GNUC__ < %(major)d) || \\ (__GNUC_MINOR__ < %(minor)d) || \\ (__GNUC_PATCHLEVEL__ < %(patchlevel)d) #error gcc >= %(version)s required #endif return 0; } """) kw = {'version': version, 'major': major, 'minor': minor, 'patchlevel': patchlevel} return cmd.try_compile(body % kw, None, None) def check_gcc_function_attribute(cmd, attribute, name): """Return True if the given function attribute is supported.""" cmd._check_compiler() body = textwrap.dedent(""" #pragma GCC diagnostic error "-Wattributes" #pragma clang diagnostic error "-Wattributes" int %s %s(void* unused) { return 0; } int main() { return 0; } """) % (attribute, name) return cmd.try_compile(body, None, None) != 0 def check_gcc_function_attribute_with_intrinsics(cmd, attribute, name, code, include): """Return True if the given function attribute is supported with intrinsics.""" cmd._check_compiler() body = textwrap.dedent(""" #include<%s> int %s %s(void) { %s; return 0; } int main() { return 0; } """) % (include, attribute, name, code) return cmd.try_compile(body, None, None) != 0 def check_gcc_variable_attribute(cmd, attribute): """Return True if the given variable attribute is supported.""" cmd._check_compiler() body = textwrap.dedent(""" #pragma GCC diagnostic error "-Wattributes" #pragma clang diagnostic error "-Wattributes" int %s foo; int main() { return 0; } """) % (attribute, ) return cmd.try_compile(body, None, None) != 0
""" Test the scalar constructors, which also do type-coercion """ import pytest import numpy as np from numpy.testing import ( assert_equal, assert_almost_equal, assert_warns, ) class TestFromString: def test_floating(self): # Ticket #640, floats from string fsingle = np.single('1.234') fdouble = np.double('1.234') flongdouble = np.longdouble('1.234') assert_almost_equal(fsingle, 1.234) assert_almost_equal(fdouble, 1.234) assert_almost_equal(flongdouble, 1.234) def test_floating_overflow(self): """ Strings containing an unrepresentable float overflow """ fhalf = np.half('1e10000') assert_equal(fhalf, np.inf) fsingle = np.single('1e10000') assert_equal(fsingle, np.inf) fdouble = np.double('1e10000') assert_equal(fdouble, np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '1e10000') assert_equal(flongdouble, np.inf) fhalf = np.half('-1e10000') assert_equal(fhalf, -np.inf) fsingle = np.single('-1e10000') assert_equal(fsingle, -np.inf) fdouble = np.double('-1e10000') assert_equal(fdouble, -np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '-1e10000') assert_equal(flongdouble, -np.inf) class TestExtraArgs: def test_superclass(self): # try both positional and keyword arguments s = np.str_(b'\\x61', encoding='unicode-escape') assert s == 'a' s = np.str_(b'\\x61', 'unicode-escape') assert s == 'a' # previously this would return '\\xx' with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', encoding='unicode-escape') with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', 'unicode-escape') # superclass fails, but numpy succeeds assert np.bytes_(-2) == b'-2' def test_datetime(self): dt = np.datetime64('2000-01', ('M', 2)) assert np.datetime_data(dt) == ('M', 2) with pytest.raises(TypeError): np.datetime64('2000', garbage=True) def test_bool(self): with pytest.raises(TypeError): np.bool_(False, garbage=True) def test_void(self): with pytest.raises(TypeError): np.void(b'test', garbage=True) class TestFromInt: def test_intp(self): # Ticket #99 assert_equal(1024, np.intp(1024)) def test_uint64_from_negative(self): assert_equal(np.uint64(-2), np.uint64(18446744073709551614)) int_types = [np.byte, np.short, np.intc, np.int_, np.longlong] uint_types = [np.ubyte, np.ushort, np.uintc, np.uint, np.ulonglong] float_types = [np.half, np.single, np.double, np.longdouble] cfloat_types = [np.csingle, np.cdouble, np.clongdouble] class TestArrayFromScalar: """ gh-15467 """ def _do_test(self, t1, t2): x = t1(2) arr = np.array(x, dtype=t2) # type should be preserved exactly if t2 is None: assert arr.dtype.type is t1 else: assert arr.dtype.type is t2 @pytest.mark.parametrize('t1', int_types + uint_types) @pytest.mark.parametrize('t2', int_types + uint_types + [None]) def test_integers(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', float_types) @pytest.mark.parametrize('t2', float_types + [None]) def test_reals(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', cfloat_types) @pytest.mark.parametrize('t2', cfloat_types + [None]) def test_complex(self, t1, t2): return self._do_test(t1, t2)
numpy/numpy
numpy/core/tests/test_scalar_ctors.py
numpy/distutils/command/autodist.py
"""Implementation of __array_function__ overrides from NEP-18.""" import collections import functools import os import textwrap from numpy.core._multiarray_umath import ( add_docstring, implement_array_function, _get_implementing_args) from numpy.compat._inspect import getargspec ARRAY_FUNCTION_ENABLED = bool( int(os.environ.get('NUMPY_EXPERIMENTAL_ARRAY_FUNCTION', 1))) array_function_like_doc = ( """like : array_like Reference object to allow the creation of arrays which are not NumPy arrays. If an array-like passed in as ``like`` supports the ``__array_function__`` protocol, the result will be defined by it. In this case, it ensures the creation of an array object compatible with that passed in via this argument.""" ) def set_array_function_like_doc(public_api): if public_api.__doc__ is not None: public_api.__doc__ = public_api.__doc__.replace( "${ARRAY_FUNCTION_LIKE}", array_function_like_doc, ) return public_api add_docstring( implement_array_function, """ Implement a function with checks for __array_function__ overrides. All arguments are required, and can only be passed by position. Parameters ---------- implementation : function Function that implements the operation on NumPy array without overrides when called like ``implementation(*args, **kwargs)``. public_api : function Function exposed by NumPy's public API originally called like ``public_api(*args, **kwargs)`` on which arguments are now being checked. relevant_args : iterable Iterable of arguments to check for __array_function__ methods. args : tuple Arbitrary positional arguments originally passed into ``public_api``. kwargs : dict Arbitrary keyword arguments originally passed into ``public_api``. Returns ------- Result from calling ``implementation()`` or an ``__array_function__`` method, as appropriate. Raises ------ TypeError : if no implementation is found. """) # exposed for testing purposes; used internally by implement_array_function add_docstring( _get_implementing_args, """ Collect arguments on which to call __array_function__. Parameters ---------- relevant_args : iterable of array-like Iterable of possibly array-like arguments to check for __array_function__ methods. Returns ------- Sequence of arguments with __array_function__ methods, in the order in which they should be called. """) ArgSpec = collections.namedtuple('ArgSpec', 'args varargs keywords defaults') def verify_matching_signatures(implementation, dispatcher): """Verify that a dispatcher function has the right signature.""" implementation_spec = ArgSpec(*getargspec(implementation)) dispatcher_spec = ArgSpec(*getargspec(dispatcher)) if (implementation_spec.args != dispatcher_spec.args or implementation_spec.varargs != dispatcher_spec.varargs or implementation_spec.keywords != dispatcher_spec.keywords or (bool(implementation_spec.defaults) != bool(dispatcher_spec.defaults)) or (implementation_spec.defaults is not None and len(implementation_spec.defaults) != len(dispatcher_spec.defaults))): raise RuntimeError('implementation and dispatcher for %s have ' 'different function signatures' % implementation) if implementation_spec.defaults is not None: if dispatcher_spec.defaults != (None,) * len(dispatcher_spec.defaults): raise RuntimeError('dispatcher functions can only use None for ' 'default argument values') def set_module(module): """Decorator for overriding __module__ on a function or class. 
Example usage:: @set_module('numpy') def example(): pass assert example.__module__ == 'numpy' """ def decorator(func): if module is not None: func.__module__ = module return func return decorator # Call textwrap.dedent here instead of in the function so as to avoid # calling dedent multiple times on the same text _wrapped_func_source = textwrap.dedent(""" @functools.wraps(implementation) def {name}(*args, **kwargs): relevant_args = dispatcher(*args, **kwargs) return implement_array_function( implementation, {name}, relevant_args, args, kwargs) """) def array_function_dispatch(dispatcher, module=None, verify=True, docs_from_dispatcher=False): """Decorator for adding dispatch with the __array_function__ protocol. See NEP-18 for example usage. Parameters ---------- dispatcher : callable Function that when called like ``dispatcher(*args, **kwargs)`` with arguments from the NumPy function call returns an iterable of array-like arguments to check for ``__array_function__``. module : str, optional __module__ attribute to set on new function, e.g., ``module='numpy'``. By default, module is copied from the decorated function. verify : bool, optional If True, verify that the signatures of the dispatcher and the decorated function match exactly: all required and optional arguments should appear in order with the same names, but the default values for all optional arguments should be ``None``. Only disable verification if the dispatcher's signature needs to deviate for some particular reason, e.g., because the function has a signature like ``func(*args, **kwargs)``. docs_from_dispatcher : bool, optional If True, copy docs from the dispatcher function onto the dispatched function, rather than from the implementation. This is useful for functions defined in C, which otherwise don't have docstrings. Returns ------- Function suitable for decorating the implementation of a NumPy function. """ if not ARRAY_FUNCTION_ENABLED: def decorator(implementation): if docs_from_dispatcher: add_docstring(implementation, dispatcher.__doc__) if module is not None: implementation.__module__ = module return implementation return decorator def decorator(implementation): if verify: verify_matching_signatures(implementation, dispatcher) if docs_from_dispatcher: add_docstring(implementation, dispatcher.__doc__) # Equivalently, we could define this function directly instead of using # exec. This version has the advantage of giving the helper function a # more interpretable name. Otherwise, the original function does not # show up at all in many cases, e.g., if it's written in C or if the # dispatcher gets an invalid keyword argument. source = _wrapped_func_source.format(name=implementation.__name__) source_object = compile( source, filename='<__array_function__ internals>', mode='exec') scope = { 'implementation': implementation, 'dispatcher': dispatcher, 'functools': functools, 'implement_array_function': implement_array_function, } exec(source_object, scope) public_api = scope[implementation.__name__] if module is not None: public_api.__module__ = module public_api._implementation = implementation return public_api return decorator def array_function_from_dispatcher( implementation, module=None, verify=True, docs_from_dispatcher=True): """Like array_function_dispatch, but with function arguments flipped.""" def decorator(dispatcher): return array_function_dispatch( dispatcher, module, verify=verify, docs_from_dispatcher=docs_from_dispatcher)(implementation) return decorator
""" Test the scalar constructors, which also do type-coercion """ import pytest import numpy as np from numpy.testing import ( assert_equal, assert_almost_equal, assert_warns, ) class TestFromString: def test_floating(self): # Ticket #640, floats from string fsingle = np.single('1.234') fdouble = np.double('1.234') flongdouble = np.longdouble('1.234') assert_almost_equal(fsingle, 1.234) assert_almost_equal(fdouble, 1.234) assert_almost_equal(flongdouble, 1.234) def test_floating_overflow(self): """ Strings containing an unrepresentable float overflow """ fhalf = np.half('1e10000') assert_equal(fhalf, np.inf) fsingle = np.single('1e10000') assert_equal(fsingle, np.inf) fdouble = np.double('1e10000') assert_equal(fdouble, np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '1e10000') assert_equal(flongdouble, np.inf) fhalf = np.half('-1e10000') assert_equal(fhalf, -np.inf) fsingle = np.single('-1e10000') assert_equal(fsingle, -np.inf) fdouble = np.double('-1e10000') assert_equal(fdouble, -np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '-1e10000') assert_equal(flongdouble, -np.inf) class TestExtraArgs: def test_superclass(self): # try both positional and keyword arguments s = np.str_(b'\\x61', encoding='unicode-escape') assert s == 'a' s = np.str_(b'\\x61', 'unicode-escape') assert s == 'a' # previously this would return '\\xx' with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', encoding='unicode-escape') with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', 'unicode-escape') # superclass fails, but numpy succeeds assert np.bytes_(-2) == b'-2' def test_datetime(self): dt = np.datetime64('2000-01', ('M', 2)) assert np.datetime_data(dt) == ('M', 2) with pytest.raises(TypeError): np.datetime64('2000', garbage=True) def test_bool(self): with pytest.raises(TypeError): np.bool_(False, garbage=True) def test_void(self): with pytest.raises(TypeError): np.void(b'test', garbage=True) class TestFromInt: def test_intp(self): # Ticket #99 assert_equal(1024, np.intp(1024)) def test_uint64_from_negative(self): assert_equal(np.uint64(-2), np.uint64(18446744073709551614)) int_types = [np.byte, np.short, np.intc, np.int_, np.longlong] uint_types = [np.ubyte, np.ushort, np.uintc, np.uint, np.ulonglong] float_types = [np.half, np.single, np.double, np.longdouble] cfloat_types = [np.csingle, np.cdouble, np.clongdouble] class TestArrayFromScalar: """ gh-15467 """ def _do_test(self, t1, t2): x = t1(2) arr = np.array(x, dtype=t2) # type should be preserved exactly if t2 is None: assert arr.dtype.type is t1 else: assert arr.dtype.type is t2 @pytest.mark.parametrize('t1', int_types + uint_types) @pytest.mark.parametrize('t2', int_types + uint_types + [None]) def test_integers(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', float_types) @pytest.mark.parametrize('t2', float_types + [None]) def test_reals(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', cfloat_types) @pytest.mark.parametrize('t2', cfloat_types + [None]) def test_complex(self, t1, t2): return self._do_test(t1, t2)
numpy/numpy
numpy/core/tests/test_scalar_ctors.py
numpy/core/overrides.py
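The decorator machinery above is easiest to see in use. A minimal sketch of wiring a new function into the ``__array_function__`` protocol; the function and dispatcher names here are hypothetical, and the import path assumes the module shown above:

import numpy as np
from numpy.core.overrides import array_function_dispatch

# The dispatcher mirrors the implementation's signature (defaults become
# None) and returns the arguments to check for __array_function__.
def _broadcast_pair_dispatcher(a, b):
    return (a, b)

@array_function_dispatch(_broadcast_pair_dispatcher, module='numpy')
def broadcast_pair(a, b):
    """Broadcast two array-likes against each other (hypothetical)."""
    return np.broadcast_arrays(a, b)

# Calling broadcast_pair(x, y) now defers to x.__array_function__ or
# y.__array_function__ whenever either argument defines it.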
from typing import Any import numpy as np f8 = np.float64() i8 = np.int64() u8 = np.uint64() f4 = np.float32() i4 = np.int32() u4 = np.uint32() td = np.timedelta64(0, "D") b_ = np.bool_() b = bool() f = float() i = int() AR_b: np.ndarray[Any, np.dtype[np.bool_]] AR_m: np.ndarray[Any, np.dtype[np.timedelta64]] # Time structures reveal_type(td % td) # E: numpy.timedelta64 reveal_type(AR_m % td) # E: Any reveal_type(td % AR_m) # E: Any reveal_type(divmod(td, td)) # E: Tuple[{int64}, numpy.timedelta64] reveal_type(divmod(AR_m, td)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[numpy.typing._64Bit]]], numpy.ndarray[Any, numpy.dtype[numpy.timedelta64]]] reveal_type(divmod(td, AR_m)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[numpy.typing._64Bit]]], numpy.ndarray[Any, numpy.dtype[numpy.timedelta64]]] # Bool reveal_type(b_ % b) # E: {int8} reveal_type(b_ % i) # E: {int_} reveal_type(b_ % f) # E: {float64} reveal_type(b_ % b_) # E: {int8} reveal_type(b_ % i8) # E: {int64} reveal_type(b_ % u8) # E: {uint64} reveal_type(b_ % f8) # E: {float64} reveal_type(b_ % AR_b) # E: numpy.ndarray[Any, numpy.dtype[{int8}]] reveal_type(divmod(b_, b)) # E: Tuple[{int8}, {int8}] reveal_type(divmod(b_, i)) # E: Tuple[{int_}, {int_}] reveal_type(divmod(b_, f)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(b_, b_)) # E: Tuple[{int8}, {int8}] reveal_type(divmod(b_, i8)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(b_, u8)) # E: Tuple[{uint64}, {uint64}] reveal_type(divmod(b_, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(b_, AR_b)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[{int8}]], numpy.ndarray[Any, numpy.dtype[{int8}]]] reveal_type(b % b_) # E: {int8} reveal_type(i % b_) # E: {int_} reveal_type(f % b_) # E: {float64} reveal_type(b_ % b_) # E: {int8} reveal_type(i8 % b_) # E: {int64} reveal_type(u8 % b_) # E: {uint64} reveal_type(f8 % b_) # E: {float64} reveal_type(AR_b % b_) # E: numpy.ndarray[Any, numpy.dtype[{int8}]] reveal_type(divmod(b, b_)) # E: Tuple[{int8}, {int8}] reveal_type(divmod(i, b_)) # E: Tuple[{int_}, {int_}] reveal_type(divmod(f, b_)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(b_, b_)) # E: Tuple[{int8}, {int8}] reveal_type(divmod(i8, b_)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(u8, b_)) # E: Tuple[{uint64}, {uint64}] reveal_type(divmod(f8, b_)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(AR_b, b_)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[{int8}]], numpy.ndarray[Any, numpy.dtype[{int8}]]] # int reveal_type(i8 % b) # E: {int64} reveal_type(i8 % i) # E: {int64} reveal_type(i8 % f) # E: {float64} reveal_type(i8 % i8) # E: {int64} reveal_type(i8 % f8) # E: {float64} reveal_type(i4 % i8) # E: {int64} reveal_type(i4 % f8) # E: {float64} reveal_type(i4 % i4) # E: {int32} reveal_type(i4 % f4) # E: {float32} reveal_type(i8 % AR_b) # E: numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]] reveal_type(divmod(i8, b)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(i8, i)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(i8, f)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i8, i8)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(i8, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i8, i4)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(i8, f4)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i4, i4)) # E: Tuple[{int32}, {int32}] reveal_type(divmod(i4, f4)) # E: Tuple[{float32}, {float32}] reveal_type(divmod(i8, AR_b)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]], numpy.ndarray[Any,
numpy.dtype[numpy.signedinteger[Any]]]] reveal_type(b % i8) # E: {int64} reveal_type(i % i8) # E: {int64} reveal_type(f % i8) # E: {float64} reveal_type(i8 % i8) # E: {int64} reveal_type(f8 % i8) # E: {float64} reveal_type(i8 % i4) # E: {int64} reveal_type(f8 % i4) # E: {float64} reveal_type(i4 % i4) # E: {int32} reveal_type(f4 % i4) # E: {float32} reveal_type(AR_b % i8) # E: numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]] reveal_type(divmod(b, i8)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(i, i8)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(f, i8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i8, i8)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(f8, i8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i4, i8)) # E: Tuple[{int64}, {int64}] reveal_type(divmod(f4, i8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i4, i4)) # E: Tuple[{int32}, {int32}] reveal_type(divmod(f4, i4)) # E: Tuple[{float32}, {float32}] reveal_type(divmod(AR_b, i8)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]]] # float reveal_type(f8 % b) # E: {float64} reveal_type(f8 % i) # E: {float64} reveal_type(f8 % f) # E: {float64} reveal_type(i8 % f4) # E: {float64} reveal_type(f4 % f4) # E: {float32} reveal_type(f8 % AR_b) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]] reveal_type(divmod(f8, b)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f8, i)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f8, f)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f8, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f8, f4)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f4, f4)) # E: Tuple[{float32}, {float32}] reveal_type(divmod(f8, AR_b)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]]] reveal_type(b % f8) # E: {float64} reveal_type(i % f8) # E: {float64} reveal_type(f % f8) # E: {float64} reveal_type(f8 % f8) # E: {float64} reveal_type(f8 % f8) # E: {float64} reveal_type(f4 % f4) # E: {float32} reveal_type(AR_b % f8) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]] reveal_type(divmod(b, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(i, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f8, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f4, f8)) # E: Tuple[{float64}, {float64}] reveal_type(divmod(f4, f4)) # E: Tuple[{float32}, {float32}] reveal_type(divmod(AR_b, f8)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]]]
""" Test the scalar constructors, which also do type-coercion """ import pytest import numpy as np from numpy.testing import ( assert_equal, assert_almost_equal, assert_warns, ) class TestFromString: def test_floating(self): # Ticket #640, floats from string fsingle = np.single('1.234') fdouble = np.double('1.234') flongdouble = np.longdouble('1.234') assert_almost_equal(fsingle, 1.234) assert_almost_equal(fdouble, 1.234) assert_almost_equal(flongdouble, 1.234) def test_floating_overflow(self): """ Strings containing an unrepresentable float overflow """ fhalf = np.half('1e10000') assert_equal(fhalf, np.inf) fsingle = np.single('1e10000') assert_equal(fsingle, np.inf) fdouble = np.double('1e10000') assert_equal(fdouble, np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '1e10000') assert_equal(flongdouble, np.inf) fhalf = np.half('-1e10000') assert_equal(fhalf, -np.inf) fsingle = np.single('-1e10000') assert_equal(fsingle, -np.inf) fdouble = np.double('-1e10000') assert_equal(fdouble, -np.inf) flongdouble = assert_warns(RuntimeWarning, np.longdouble, '-1e10000') assert_equal(flongdouble, -np.inf) class TestExtraArgs: def test_superclass(self): # try both positional and keyword arguments s = np.str_(b'\\x61', encoding='unicode-escape') assert s == 'a' s = np.str_(b'\\x61', 'unicode-escape') assert s == 'a' # previously this would return '\\xx' with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', encoding='unicode-escape') with pytest.raises(UnicodeDecodeError): np.str_(b'\\xx', 'unicode-escape') # superclass fails, but numpy succeeds assert np.bytes_(-2) == b'-2' def test_datetime(self): dt = np.datetime64('2000-01', ('M', 2)) assert np.datetime_data(dt) == ('M', 2) with pytest.raises(TypeError): np.datetime64('2000', garbage=True) def test_bool(self): with pytest.raises(TypeError): np.bool_(False, garbage=True) def test_void(self): with pytest.raises(TypeError): np.void(b'test', garbage=True) class TestFromInt: def test_intp(self): # Ticket #99 assert_equal(1024, np.intp(1024)) def test_uint64_from_negative(self): assert_equal(np.uint64(-2), np.uint64(18446744073709551614)) int_types = [np.byte, np.short, np.intc, np.int_, np.longlong] uint_types = [np.ubyte, np.ushort, np.uintc, np.uint, np.ulonglong] float_types = [np.half, np.single, np.double, np.longdouble] cfloat_types = [np.csingle, np.cdouble, np.clongdouble] class TestArrayFromScalar: """ gh-15467 """ def _do_test(self, t1, t2): x = t1(2) arr = np.array(x, dtype=t2) # type should be preserved exactly if t2 is None: assert arr.dtype.type is t1 else: assert arr.dtype.type is t2 @pytest.mark.parametrize('t1', int_types + uint_types) @pytest.mark.parametrize('t2', int_types + uint_types + [None]) def test_integers(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', float_types) @pytest.mark.parametrize('t2', float_types + [None]) def test_reals(self, t1, t2): return self._do_test(t1, t2) @pytest.mark.parametrize('t1', cfloat_types) @pytest.mark.parametrize('t2', cfloat_types + [None]) def test_complex(self, t1, t2): return self._do_test(t1, t2)
numpy/numpy
numpy/core/tests/test_scalar_ctors.py
numpy/typing/tests/data/reveal/mod.py
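The expected-type comments in the file above encode NumPy's promotion rules for ``%`` and ``divmod``; they can be spot-checked at runtime without mypy. A small sketch assuming nothing beyond NumPy:

import numpy as np

i8, i4, f4, b_ = np.int64(7), np.int32(3), np.float32(2.5), np.bool_(True)

print(type(i8 % i4))   # <class 'numpy.int64'>
print(type(i8 % f4))   # <class 'numpy.float64'>  (int64 and float32 promote to float64)
print(type(b_ % i8))   # <class 'numpy.int64'>
q, r = divmod(i8, f4)  # quotient and remainder are both float64 scalars
print(type(q), type(r))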
# Licensed under a 3-clause BSD style license - see LICENSE.rst import contextlib import re import sys import inspect import os from collections import OrderedDict from operator import itemgetter import numpy as np __all__ = ['register_reader', 'register_writer', 'register_identifier', 'identify_format', 'get_reader', 'get_writer', 'read', 'write', 'get_formats', 'IORegistryError', 'delay_doc_updates', 'UnifiedReadWriteMethod', 'UnifiedReadWrite'] __doctest_skip__ = ['register_identifier'] _readers = OrderedDict() _writers = OrderedDict() _identifiers = OrderedDict() PATH_TYPES = (str, os.PathLike) class IORegistryError(Exception): """Custom error for registry clashes. """ pass # If multiple formats are added to one class the update of the docs is quite # expensive. Classes for which the doc update is temporarily delayed are added # to this set. _delayed_docs_classes = set() @contextlib.contextmanager def delay_doc_updates(cls): """Contextmanager to disable documentation updates when registering reader and writer. The documentation is only built once when the contextmanager exits. .. versionadded:: 1.3 Parameters ---------- cls : class Class for which the documentation updates should be delayed. Notes ----- Registering multiple readers and writers can cause significant overhead because the documentation of the corresponding ``read`` and ``write`` methods is built every time. .. warning:: This contextmanager is experimental and may be replaced by a more general approach. Examples -------- See, for example, the source code of ``astropy.table.__init__``. """ _delayed_docs_classes.add(cls) yield _delayed_docs_classes.discard(cls) _update__doc__(cls, 'read') _update__doc__(cls, 'write') def get_formats(data_class=None, readwrite=None): """ Get the list of registered I/O formats as a Table. Parameters ---------- data_class : class, optional Filter readers/writers to match data class (default = all classes). readwrite : str or None, optional Search only for readers (``"Read"``) or writers (``"Write"``). If None search for both. Default is None. .. versionadded:: 1.3 Returns ------- format_table : :class:`~astropy.table.Table` Table of available I/O formats. """ from astropy.table import Table format_classes = sorted(set(_readers) | set(_writers), key=itemgetter(0)) rows = [] for format_class in format_classes: if (data_class is not None and not _is_best_match( data_class, format_class[1], format_classes)): continue has_read = 'Yes' if format_class in _readers else 'No' has_write = 'Yes' if format_class in _writers else 'No' has_identify = 'Yes' if format_class in _identifiers else 'No' # Check if this is a short name (e.g. 'rdb') which is deprecated in # favor of the full 'ascii.rdb'. ascii_format_class = ('ascii.' + format_class[0], format_class[1]) deprecated = 'Yes' if ascii_format_class in format_classes else '' rows.append((format_class[1].__name__, format_class[0], has_read, has_write, has_identify, deprecated)) if readwrite is not None: if readwrite == 'Read': rows = [row for row in rows if row[2] == 'Yes'] elif readwrite == 'Write': rows = [row for row in rows if row[3] == 'Yes'] else: raise ValueError('unrecognized value for "readwrite": {0}.\n' 'Allowed values are "Read", "Write" and None.'.format(readwrite)) # Sorting the list of tuples is much faster than sorting it after the table # is created. (#5262) if rows: # Indices represent "Data Class", "Deprecated" and "Format".
data = list(zip(*sorted(rows, key=itemgetter(0, 5, 1)))) else: data = None format_table = Table(data, names=('Data class', 'Format', 'Read', 'Write', 'Auto-identify', 'Deprecated')) if not np.any(format_table['Deprecated'] == 'Yes'): format_table.remove_column('Deprecated') return format_table def _update__doc__(data_class, readwrite): """ Update the docstring to include all the available readers / writers for the ``data_class.read`` or ``data_class.write`` functions (respectively). """ FORMATS_TEXT = 'The available built-in formats are:' # Get the existing read or write method and its docstring class_readwrite_func = getattr(data_class, readwrite) if not isinstance(class_readwrite_func.__doc__, str): # No docstring--could just be test code, or possibly code compiled # without docstrings return lines = class_readwrite_func.__doc__.splitlines() # Find the location of the existing formats table if it exists sep_indices = [ii for ii, line in enumerate(lines) if FORMATS_TEXT in line] if sep_indices: # Chop off the existing formats table, including the initial blank line chop_index = sep_indices[0] lines = lines[:chop_index] # Find the minimum indent, skipping the first line because it might be odd matches = [re.search(r'(\S)', line) for line in lines[1:]] left_indent = ' ' * min(match.start() for match in matches if match) # Get the available unified I/O formats for this class # Include only formats that have a reader, and drop the 'Data class' column format_table = get_formats(data_class, readwrite.capitalize()) format_table.remove_column('Data class') # Get the available formats as a table, then munge the output of pformat() # a bit and put it into the docstring. new_lines = format_table.pformat(max_lines=-1, max_width=80) table_rst_sep = re.sub('-', '=', new_lines[1]) new_lines[1] = table_rst_sep new_lines.insert(0, table_rst_sep) new_lines.append(table_rst_sep) # Check for deprecated names and include a warning at the end. if 'Deprecated' in format_table.colnames: new_lines.extend(['', 'Deprecated format names like ``aastex`` will be ' 'removed in a future version. Use the full ', 'name (e.g. ``ascii.aastex``) instead.']) new_lines = [FORMATS_TEXT, ''] + new_lines lines.extend([left_indent + line for line in new_lines]) # Depending on Python version and whether class_readwrite_func is # an instancemethod or classmethod, one of the following will work. if isinstance(class_readwrite_func, UnifiedReadWrite): class_readwrite_func.__class__.__doc__ = '\n'.join(lines) else: try: class_readwrite_func.__doc__ = '\n'.join(lines) except AttributeError: class_readwrite_func.__func__.__doc__ = '\n'.join(lines) def register_reader(data_format, data_class, function, force=False, priority=0): """ Register a reader function. Parameters ---------- data_format : str The data format identifier. This is the string that will be used to specify the data type when reading. data_class : class The class of the object that the reader produces. function : function The function to read in a data object. force : bool, optional Whether to override any existing function if already present. Default is ``False``. priority : int, optional The priority of the reader, used to compare possible formats when trying to determine the best reader to use. Higher priorities are preferred over lower priorities, with the default priority being 0 (negative numbers are allowed though). 
""" if not (data_format, data_class) in _readers or force: _readers[(data_format, data_class)] = function, priority else: raise IORegistryError("Reader for format '{}' and class '{}' is " 'already defined' ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'read') def unregister_reader(data_format, data_class): """ Unregister a reader function Parameters ---------- data_format : str The data format identifier. data_class : class The class of the object that the reader produces. """ if (data_format, data_class) in _readers: _readers.pop((data_format, data_class)) else: raise IORegistryError("No reader defined for format '{}' and class '{}'" ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'read') def register_writer(data_format, data_class, function, force=False, priority=0): """ Register a table writer function. Parameters ---------- data_format : str The data format identifier. This is the string that will be used to specify the data type when writing. data_class : class The class of the object that can be written. function : function The function to write out a data object. force : bool, optional Whether to override any existing function if already present. Default is ``False``. priority : int, optional The priority of the writer, used to compare possible formats when trying to determine the best writer to use. Higher priorities are preferred over lower priorities, with the default priority being 0 (negative numbers are allowed though). """ if not (data_format, data_class) in _writers or force: _writers[(data_format, data_class)] = function, priority else: raise IORegistryError("Writer for format '{}' and class '{}' is " 'already defined' ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'write') def unregister_writer(data_format, data_class): """ Unregister a writer function Parameters ---------- data_format : str The data format identifier. data_class : class The class of the object that can be written. """ if (data_format, data_class) in _writers: _writers.pop((data_format, data_class)) else: raise IORegistryError("No writer defined for format '{}' and class '{}'" ''.format(data_format, data_class.__name__)) if data_class not in _delayed_docs_classes: _update__doc__(data_class, 'write') def register_identifier(data_format, data_class, identifier, force=False): """ Associate an identifier function with a specific data type. Parameters ---------- data_format : str The data format identifier. This is the string that is used to specify the data type when reading/writing. data_class : class The class of the object that can be written. identifier : function A function that checks the argument specified to `read` or `write` to determine whether the input can be interpreted as a table of type ``data_format``. This function should take the following arguments: - ``origin``: A string ``"read"`` or ``"write"`` identifying whether the file is to be opened for reading or writing. - ``path``: The path to the file. - ``fileobj``: An open file object to read the file's contents, or `None` if the file could not be opened. - ``*args``: Positional arguments for the `read` or `write` function. - ``**kwargs``: Keyword arguments for the `read` or `write` function. One or both of ``path`` or ``fileobj`` may be `None`. If they are both `None`, the identifier will need to work from ``args[0]``. 
The function should return True if the input can be identified as being of format ``data_format``, and False otherwise. force : bool, optional Whether to override any existing function if already present. Default is ``False``. Examples -------- To set the identifier based on extensions, for formats that take a filename as a first argument, you can do for example:: >>> def my_identifier(*args, **kwargs): ... return isinstance(args[0], str) and args[0].endswith('.tbl') >>> register_identifier('ipac', Table, my_identifier) """ if not (data_format, data_class) in _identifiers or force: _identifiers[(data_format, data_class)] = identifier else: raise IORegistryError("Identifier for format '{}' and class '{}' is " 'already defined'.format(data_format, data_class.__name__)) def unregister_identifier(data_format, data_class): """ Unregister an identifier function Parameters ---------- data_format : str The data format identifier. data_class : class The class of the object that can be read/written. """ if (data_format, data_class) in _identifiers: _identifiers.pop((data_format, data_class)) else: raise IORegistryError("No identifier defined for format '{}' and class" " '{}'".format(data_format, data_class.__name__)) def identify_format(origin, data_class_required, path, fileobj, args, kwargs): """Loop through identifiers to see which formats match. Parameters ---------- origin : str A string ``"read"`` or ``"write"`` identifying whether the file is to be opened for reading or writing. data_class_required : object The specified class for the result of `read` or the class that is to be written. path : str or path-like or None The path to the file or None. fileobj : file-like or None. An open file object to read the file's contents, or ``None`` if the file could not be opened. args : sequence Positional arguments for the `read` or `write` function. Note that these must be provided as a sequence. kwargs : dict-like Keyword arguments for the `read` or `write` function. Note that this parameter must be `dict`-like. Returns ------- valid_formats : list List of matching formats. """ valid_formats = [] for data_format, data_class in _identifiers: if _is_best_match(data_class_required, data_class, _identifiers): if _identifiers[(data_format, data_class)]( origin, path, fileobj, *args, **kwargs): valid_formats.append(data_format) return valid_formats def _get_format_table_str(data_class, readwrite): format_table = get_formats(data_class, readwrite=readwrite) format_table.remove_column('Data class') format_table_str = '\n'.join(format_table.pformat(max_lines=-1)) return format_table_str def get_reader(data_format, data_class): """Get reader for ``data_format``. Parameters ---------- data_format : str The data format identifier. This is the string that is used to specify the data type when reading/writing. data_class : class The class of the object that can be written. Returns ------- reader : callable The registered reader function for this format and class. """ readers = [(fmt, cls) for fmt, cls in _readers if fmt == data_format] for reader_format, reader_class in readers: if _is_best_match(data_class, reader_class, readers): return _readers[(reader_format, reader_class)][0] else: format_table_str = _get_format_table_str(data_class, 'Read') raise IORegistryError( "No reader defined for format '{}' and class '{}'.\n\nThe " "available formats are:\n\n{}".format( data_format, data_class.__name__, format_table_str)) def get_writer(data_format, data_class): """Get writer for ``data_format``.
Parameters ---------- data_format : str The data format identifier. This is the string that is used to specify the data type when reading/writing. data_class : class The class of the object that can be written. Returns ------- writer : callable The registered writer function for this format and class. """ writers = [(fmt, cls) for fmt, cls in _writers if fmt == data_format] for writer_format, writer_class in writers: if _is_best_match(data_class, writer_class, writers): return _writers[(writer_format, writer_class)][0] else: format_table_str = _get_format_table_str(data_class, 'Write') raise IORegistryError( "No writer defined for format '{}' and class '{}'.\n\nThe " "available formats are:\n\n{}".format( data_format, data_class.__name__, format_table_str)) def read(cls, *args, format=None, cache=False, **kwargs): """ Read in data. The arguments passed to this method depend on the format. """ ctx = None try: if format is None: path = None fileobj = None if len(args): if isinstance(args[0], PATH_TYPES) and not os.path.isdir(args[0]): from astropy.utils.data import get_readable_fileobj # path might be an os.PathLike object if isinstance(args[0], os.PathLike): args = (os.fspath(args[0]),) + args[1:] path = args[0] try: ctx = get_readable_fileobj(args[0], encoding='binary', cache=cache) fileobj = ctx.__enter__() except OSError: raise except Exception: fileobj = None else: args = [fileobj] + list(args[1:]) elif hasattr(args[0], 'read'): path = None fileobj = args[0] format = _get_valid_format( 'read', cls, path, fileobj, args, kwargs) reader = get_reader(format, cls) data = reader(*args, **kwargs) if not isinstance(data, cls): # User has read with a subclass where only the parent class is # registered. This returns the parent class, so try coercing # to desired subclass. try: data = cls(data) except Exception: raise TypeError('could not convert reader output to {} ' 'class.'.format(cls.__name__)) finally: if ctx is not None: ctx.__exit__(*sys.exc_info()) return data def write(data, *args, format=None, **kwargs): """ Write out data. The arguments passed to this method depend on the format. """ if format is None: path = None fileobj = None if len(args): if isinstance(args[0], PATH_TYPES): # path might be an os.PathLike object if isinstance(args[0], os.PathLike): args = (os.fspath(args[0]),) + args[1:] path = args[0] fileobj = None elif hasattr(args[0], 'read'): path = None fileobj = args[0] format = _get_valid_format( 'write', data.__class__, path, fileobj, args, kwargs) writer = get_writer(format, data.__class__) writer(data, *args, **kwargs) def _is_best_match(class1, class2, format_classes): """ Determine if class2 is the "best" match for class1 in the list of classes. It is assumed that (class2 in classes) is True. class2 is the best match if: - ``class1`` is a subclass of ``class2`` AND - ``class2`` is the nearest ancestor of ``class1`` that is in classes (which includes the case that ``class1 is class2``) """ if issubclass(class1, class2): classes = {cls for fmt, cls in format_classes} for parent in class1.__mro__: if parent is class2: # class2 is closest registered ancestor return True if parent in classes: # class2 was superseded return False return False def _get_valid_format(mode, cls, path, fileobj, args, kwargs): """ Returns the first valid format that can be used to read/write the data in question. Mode can be either 'read' or 'write'.
""" valid_formats = identify_format(mode, cls, path, fileobj, args, kwargs) if len(valid_formats) == 0: format_table_str = _get_format_table_str(cls, mode.capitalize()) raise IORegistryError("Format could not be identified based on the" " file name or contents, please provide a" " 'format' argument.\n" "The available formats are:\n" "{}".format(format_table_str)) elif len(valid_formats) > 1: return _get_highest_priority_format(mode, cls, valid_formats) return valid_formats[0] def _get_highest_priority_format(mode, cls, valid_formats): """ Returns the reader or writer with the highest priority. If it is a tie, error. """ if mode == "read": format_dict = _readers mode_loader = "reader" elif mode == "write": format_dict = _writers mode_loader = "writer" best_formats = [] current_priority = - np.inf for format in valid_formats: try: _, priority = format_dict[(format, cls)] except KeyError: # We could throw an exception here, but get_reader/get_writer handle # this case better, instead maximally deprioritise the format. priority = - np.inf if priority == current_priority: best_formats.append(format) elif priority > current_priority: best_formats = [format] current_priority = priority if len(best_formats) > 1: raise IORegistryError("Format is ambiguous - options are: {}".format( ', '.join(sorted(valid_formats, key=itemgetter(0))) )) return best_formats[0] class UnifiedReadWrite: """Base class for the worker object used in unified read() or write() methods. This lightweight object is created for each `read()` or `write()` call via ``read`` / ``write`` descriptors on the data object class. The key driver is to allow complete format-specific documentation of available method options via a ``help()`` method, e.g. ``Table.read.help('fits')``. Subclasses must define a ``__call__`` method which is what actually gets called when the data object ``read()`` or ``write()`` method is called. For the canonical example see the `~astropy.table.Table` class implementation (in particular the ``connect.py`` module there). Parameters ---------- instance : object Descriptor calling instance or None if no instance cls : type Descriptor calling class (either owner class or instance class) method_name : str Method name, either 'read' or 'write' """ def __init__(self, instance, cls, method_name): self._instance = instance self._cls = cls self._method_name = method_name # 'read' or 'write' def help(self, format=None, out=None): """Output help documentation for the specified unified I/O ``format``. By default the help output is printed to the console via ``pydoc.pager``. Instead one can supplied a file handle object as ``out`` and the output will be written to that handle. Parameters ---------- format : str Unified I/O format name, e.g. 
'fits' or 'ascii.ecsv' out : None or path-like Output destination (default is stdout via a pager) """ cls = self._cls method_name = self._method_name # Get reader or writer function get_func = get_reader if method_name == 'read' else get_writer try: if format: read_write_func = get_func(format, cls) except IORegistryError as err: reader_doc = 'ERROR: ' + str(err) else: if format: # Format-specific header = ("{}.{}(format='{}') documentation\n" .format(cls.__name__, method_name, format)) doc = read_write_func.__doc__ else: # General docs header = f'{cls.__name__}.{method_name} general documentation\n' doc = getattr(cls, method_name).__doc__ reader_doc = re.sub('.', '=', header) reader_doc += header reader_doc += re.sub('.', '=', header) reader_doc += os.linesep if doc is not None: reader_doc += inspect.cleandoc(doc) if out is None: import pydoc pydoc.pager(reader_doc) else: out.write(reader_doc) def list_formats(self, out=None): """Print a list of available formats to console (or ``out`` filehandle) out : None or file handle object Output destination (default is stdout via a pager) """ tbl = get_formats(self._cls, self._method_name.capitalize()) del tbl['Data class'] if out is None: tbl.pprint(max_lines=-1, max_width=-1) else: out.write('\n'.join(tbl.pformat(max_lines=-1, max_width=-1))) return out class UnifiedReadWriteMethod(property): """Descriptor class for creating read() and write() methods in unified I/O. The canonical example is in the ``Table`` class, where the ``connect.py`` module creates subclasses of the ``UnifiedReadWrite`` class. These have custom ``__call__`` methods that do the setup work related to calling the registry read() or write() functions. With this, the ``Table`` class defines read and write methods as follows:: read = UnifiedReadWriteMethod(TableRead) write = UnifiedReadWriteMethod(TableWrite) Parameters ---------- func : `~astropy.io.registry.UnifiedReadWrite` subclass Class that defines read or write functionality """ # We subclass property to ensure that __set__ is defined and that, # therefore, we are a data descriptor, which cannot be overridden. # This also means we automatically inherit the __doc__ of fget (which will # be a UnifiedReadWrite subclass), and that this docstring gets recognized # and properly typeset by sphinx (which was previously an issue; see # gh-11554). # We override __get__ to pass both instance and class to UnifiedReadWrite. def __get__(self, instance, owner_cls): return self.fget(instance, owner_cls)
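With the registry module above in hand, hooking a custom class into the unified I/O layer takes three registrations. A sketch under stated assumptions: ``Spectrum``, the ``'my-spec'`` format name, and the ``.spec`` extension are all invented for illustration:

from astropy.io import registry

class Spectrum:  # hypothetical data class
    def __init__(self, data):
        self.data = data

def spectrum_reader(path):  # hypothetical reader
    with open(path) as fh:
        return Spectrum([float(x) for x in fh.read().split()])

def spectrum_writer(spec, path):  # hypothetical writer
    with open(path, 'w') as fh:
        fh.write(' '.join(str(x) for x in spec.data))

def spectrum_identifier(origin, path, fileobj, *args, **kwargs):
    # Signature matches what identify_format() calls above.
    return path is not None and str(path).endswith('.spec')

registry.register_reader('my-spec', Spectrum, spectrum_reader)
registry.register_writer('my-spec', Spectrum, spectrum_writer)
registry.register_identifier('my-spec', Spectrum, spectrum_identifier)

# With an explicit format, read() hands the path straight to the reader:
# spec = registry.read(Spectrum, 'data.spec', format='my-spec')
# registry.write(spec, 'copy.spec', format='my-spec')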
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Test masked class initialization, methods, and operators. Functions, including ufuncs, are tested in test_functions.py """ import operator import numpy as np from numpy.testing import assert_array_equal import pytest from astropy import units as u from astropy.units import Quantity from astropy.coordinates import Longitude from astropy.utils.masked import Masked, MaskedNDArray def assert_masked_equal(a, b): assert_array_equal(a.unmasked, b.unmasked) assert_array_equal(a.mask, b.mask) VARIOUS_ITEMS = [ (1, 1), slice(None, 1), (), 1] class ArraySetup: _data_cls = np.ndarray @classmethod def setup_class(self): self.a = np.arange(6.).reshape(2, 3) self.mask_a = np.array([[True, False, False], [False, True, False]]) self.b = np.array([-3., -2., -1.]) self.mask_b = np.array([False, True, False]) self.c = np.array([[0.25], [0.5]]) self.mask_c = np.array([[False], [True]]) self.sdt = np.dtype([('a', 'f8'), ('b', 'f8')]) self.mask_sdt = np.dtype([('a', '?'), ('b', '?')]) self.sa = np.array([[(1., 2.), (3., 4.)], [(11., 12.), (13., 14.)]], dtype=self.sdt) self.mask_sa = np.array([[(True, True), (False, False)], [(False, True), (True, False)]], dtype=self.mask_sdt) self.sb = np.array([(1., 2.), (-3., 4.)], dtype=self.sdt) self.mask_sb = np.array([(True, False), (False, False)], dtype=self.mask_sdt) class QuantitySetup(ArraySetup): _data_cls = Quantity @classmethod def setup_class(self): super().setup_class() self.a = Quantity(self.a, u.m) self.b = Quantity(self.b, u.cm) self.c = Quantity(self.c, u.km) self.sa = Quantity(self.sa, u.m, dtype=self.sdt) self.sb = Quantity(self.sb, u.cm, dtype=self.sdt) class LongitudeSetup(ArraySetup): _data_cls = Longitude @classmethod def setup_class(self): super().setup_class() self.a = Longitude(self.a, u.deg) self.b = Longitude(self.b, u.deg) self.c = Longitude(self.c, u.deg) # Note: Longitude does not work on structured arrays, so # leaving it as regular array (which just reruns some tests). class TestMaskedArrayInitialization(ArraySetup): def test_simple(self): ma = Masked(self.a, mask=self.mask_a) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.a)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.a) assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_structured(self): ma = Masked(self.sa, mask=self.mask_sa) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.sa)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.sa) assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) def test_masked_ndarray_init(): # Note: as a straight ndarray subclass, MaskedNDArray passes on # the arguments relevant for np.ndarray, not np.array. a_in = np.arange(3, dtype=int) m_in = np.array([True, False, False]) buff = a_in.tobytes() # Check we're doing things correctly using regular ndarray. a = np.ndarray(shape=(3,), dtype=int, buffer=buff) assert_array_equal(a, a_in) # Check with and without mask. 
ma = MaskedNDArray((3,), dtype=int, mask=m_in, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, m_in) ma = MaskedNDArray((3,), dtype=int, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, np.zeros(3, bool)) def test_cannot_initialize_with_masked(): with pytest.raises(ValueError, match='cannot handle np.ma.masked'): Masked(np.ma.masked) def test_cannot_just_use_anything_with_a_mask_attribute(): class my_array(np.ndarray): mask = True a = np.array([1., 2.]).view(my_array) with pytest.raises(AttributeError, match='unmasked'): Masked(a) class TestMaskedClassCreation: """Try creating a MaskedList and subclasses. By no means meant to be realistic, just to check that the basic machinery allows it. """ @classmethod def setup_class(self): self._base_classes_orig = Masked._base_classes.copy() self._masked_classes_orig = Masked._masked_classes.copy() class MaskedList(Masked, list, base_cls=list, data_cls=list): def __new__(cls, *args, mask=None, copy=False, **kwargs): self = super().__new__(cls) self._unmasked = self._data_cls(*args, **kwargs) self.mask = mask return self # Need to have shape for basics to work. @property def shape(self): return (len(self._unmasked),) self.MaskedList = MaskedList def teardown_class(self): Masked._base_classes = self._base_classes_orig Masked._masked_classes = self._masked_classes_orig def test_setup(self): assert issubclass(self.MaskedList, Masked) assert issubclass(self.MaskedList, list) assert Masked(list) is self.MaskedList def test_masked_list(self): ml = self.MaskedList(range(3), mask=[True, False, False]) assert ml.unmasked == [0, 1, 2] assert_array_equal(ml.mask, np.array([True, False, False])) ml01 = ml[:2] assert ml01.unmasked == [0, 1] assert_array_equal(ml01.mask, np.array([True, False])) def test_from_list(self): ml = Masked([1, 2, 3], mask=[True, False, False]) assert ml.unmasked == [1, 2, 3] assert_array_equal(ml.mask, np.array([True, False, False])) def test_masked_list_subclass(self): class MyList(list): pass ml = MyList(range(3)) mml = Masked(ml, mask=[False, True, False]) assert isinstance(mml, Masked) assert isinstance(mml, MyList) assert isinstance(mml.unmasked, MyList) assert mml.unmasked == [0, 1, 2] assert_array_equal(mml.mask, np.array([False, True, False])) assert Masked(MyList) is type(mml) class TestMaskedNDArraySubclassCreation: """Test that masked subclasses can be created directly and indirectly.""" @classmethod def setup_class(self): class MyArray(np.ndarray): def __new__(cls, *args, **kwargs): return np.asanyarray(*args, **kwargs).view(cls) self.MyArray = MyArray self.a = np.array([1., 2.]).view(self.MyArray) self.m = np.array([True, False], dtype=bool) def teardown_method(self, method): Masked._masked_classes.pop(self.MyArray, None) def test_direct_creation(self): assert self.MyArray not in Masked._masked_classes mcls = Masked(self.MyArray) assert issubclass(mcls, Masked) assert issubclass(mcls, self.MyArray) assert mcls.__name__ == 'MaskedMyArray' assert mcls.__doc__.startswith('Masked version of MyArray') mms = mcls(self.a, mask=self.m) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
mcls = Masked(self.MyArray) mms = mcls(self.a) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, np.zeros(mms.shape, bool)) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): mcls = Masked(self.MyArray) ma = masked_array(np.asarray(self.a), mask=self.m) mms = mcls(ma) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_indirect_creation(self): assert self.MyArray not in Masked._masked_classes mms = Masked(self.a, mask=self.m) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) assert self.MyArray in Masked._masked_classes assert Masked(self.MyArray) is type(mms) def test_can_initialize_with_masked_values(self): mcls = Masked(self.MyArray) mms = mcls(Masked(np.asarray(self.a), mask=self.m)) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_viewing(self): mms = Masked(self.a, mask=self.m) mms2 = mms.view() assert type(mms2) is mms.__class__ assert_masked_equal(mms2, mms) ma = mms.view(np.ndarray) assert type(ma) is MaskedNDArray assert_array_equal(ma.unmasked, self.a.view(np.ndarray)) assert_array_equal(ma.mask, self.m) class TestMaskedQuantityInitialization(TestMaskedArrayInitialization, QuantitySetup): def test_masked_quantity_class_init(self): # TODO: class definitions should be more easily accessible. mcls = Masked._masked_classes[self.a.__class__] # This is not a very careful test. mq = mcls([1., 2.], mask=[True, False], unit=u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.value.mask == [True, False]) assert np.all(mq.mask == [True, False]) def test_masked_quantity_getting(self): mcls = Masked._masked_classes[self.a.__class__] MQ = Masked(Quantity) assert MQ is mcls def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
MQ = Masked(Quantity) mq = MQ([1., 2.], u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.mask == [False, False]) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): MQ = Masked(Quantity) a = np.array([1., 2.]) m = np.array([True, False]) ma = masked_array(a, m) mq = MQ(ma) assert isinstance(mq, Masked) assert isinstance(mq, Quantity) assert_array_equal(mq.value.unmasked, a) assert_array_equal(mq.mask, m) class TestMaskSetting(ArraySetup): def test_whole_mask_setting_simple(self): ma = Masked(self.a) assert ma.mask.shape == ma.shape assert not ma.mask.any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask.all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array([[True] * 3, [False] * 3])) ma.mask = self.mask_a assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_whole_mask_setting_structured(self): ma = Masked(self.sa) assert ma.mask.shape == ma.shape assert not ma.mask['a'].any() and not ma.mask['b'].any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask['a'].all() and ma.mask['b'].all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array( [[(True, True)] * 2, [(False, False)] * 2], dtype=self.mask_sdt)) ma.mask = self.mask_sa assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_part_mask_setting(self, item): ma = Masked(self.a) ma.mask[item] = True expected = np.zeros(ma.shape, bool) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, bool)) # Mask propagation mask = np.zeros(self.a.shape, bool) ma = Masked(self.a, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_part_mask_setting_structured(self, item): ma = Masked(self.sa) ma.mask[item] = True expected = np.zeros(ma.shape, self.mask_sdt) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, self.mask_sdt)) # Mask propagation mask = np.zeros(self.sa.shape, self.mask_sdt) ma = Masked(self.sa, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) # Following are tests where we trust the initializer works. class MaskedArraySetup(ArraySetup): @classmethod def setup_class(self): super().setup_class() self.ma = Masked(self.a, mask=self.mask_a) self.mb = Masked(self.b, mask=self.mask_b) self.mc = Masked(self.c, mask=self.mask_c) self.msa = Masked(self.sa, mask=self.mask_sa) self.msb = Masked(self.sb, mask=self.mask_sb) class TestViewing(MaskedArraySetup): def test_viewing_as_new_type(self): ma2 = self.ma.view(type(self.ma)) assert_masked_equal(ma2, self.ma) ma3 = self.ma.view() assert_masked_equal(ma3, self.ma) def test_viewing_as_new_dtype(self): # Not very meaningful, but possible... 
ma2 = self.ma.view('c8') assert_array_equal(ma2.unmasked, self.a.view('c8')) assert_array_equal(ma2.mask, self.mask_a) @pytest.mark.parametrize('new_dtype', ['2f4', 'f8,f8,f8']) def test_viewing_as_new_dtype_not_implemented(self, new_dtype): # But cannot (yet) view in a way that would need to create a new mask, # even though that view is possible for a regular array. check = self.a.view(new_dtype) with pytest.raises(NotImplementedError, match='different.*size'): self.ma.view(check.dtype) def test_viewing_as_something_impossible(self): with pytest.raises(TypeError): # Use intp to ensure we have the same size as object, # otherwise we get a different error message Masked(np.array([1, 2], dtype=np.intp)).view(Masked) class TestMaskedArrayCopyFilled(MaskedArraySetup): def test_copy(self): ma_copy = self.ma.copy() assert type(ma_copy) is type(self.ma) assert_array_equal(ma_copy.unmasked, self.ma.unmasked) assert_array_equal(ma_copy.mask, self.ma.mask) assert not np.may_share_memory(ma_copy.unmasked, self.ma.unmasked) assert not np.may_share_memory(ma_copy.mask, self.ma.mask) @pytest.mark.parametrize('fill_value', (0, 1)) def test_filled(self, fill_value): fill_value = fill_value * getattr(self.a, 'unit', 1) expected = self.a.copy() expected[self.ma.mask] = fill_value result = self.ma.filled(fill_value) assert_array_equal(expected, result) def test_filled_no_fill_value(self): with pytest.raises(TypeError, match='missing 1 required'): self.ma.filled() @pytest.mark.parametrize('fill_value', [(0, 1), (-1, -1)]) def test_filled_structured(self, fill_value): fill_value = np.array(fill_value, dtype=self.sdt) if hasattr(self.sa, 'unit'): fill_value = fill_value << self.sa.unit expected = self.sa.copy() expected['a'][self.msa.mask['a']] = fill_value['a'] expected['b'][self.msa.mask['b']] = fill_value['b'] result = self.msa.filled(fill_value) assert_array_equal(expected, result) def test_flat(self): ma_copy = self.ma.copy() ma_flat = ma_copy.flat # Check that single item keeps class and mask ma_flat1 = ma_flat[1] assert ma_flat1.unmasked == self.a.flat[1] assert ma_flat1.mask == self.mask_a.flat[1] # As well as getting items via iteration. assert all((ma.unmasked == a and ma.mask == m) for (ma, a, m) in zip(self.ma.flat, self.a.flat, self.mask_a.flat)) # check that flat works like a view of the real array ma_flat[1] = self.b[1] assert ma_flat[1] == self.b[1] assert ma_copy[0, 1] == self.b[1] class TestMaskedQuantityCopyFilled(TestMaskedArrayCopyFilled, QuantitySetup): pass class TestMaskedLongitudeCopyFilled(TestMaskedArrayCopyFilled, LongitudeSetup): pass class TestMaskedArrayShaping(MaskedArraySetup): def test_reshape(self): ma_reshape = self.ma.reshape((6,)) expected_data = self.a.reshape((6,)) expected_mask = self.mask_a.reshape((6,)) assert ma_reshape.shape == expected_data.shape assert_array_equal(ma_reshape.unmasked, expected_data) assert_array_equal(ma_reshape.mask, expected_mask) def test_shape_setting(self): ma_reshape = self.ma.copy() ma_reshape.shape = 6, expected_data = self.a.reshape((6,)) expected_mask = self.mask_a.reshape((6,)) assert ma_reshape.shape == expected_data.shape assert_array_equal(ma_reshape.unmasked, expected_data) assert_array_equal(ma_reshape.mask, expected_mask) def test_shape_setting_failure(self): ma = self.ma.copy() with pytest.raises(ValueError, match='cannot reshape'): ma.shape = 5, assert ma.shape == self.ma.shape assert ma.mask.shape == self.ma.shape # Here, mask can be reshaped but array cannot.
ma2 = Masked(np.broadcast_to([[1.], [2.]], self.a.shape), mask=self.mask_a) with pytest.raises(AttributeError, match='ncompatible shape'): ma2.shape = 6, assert ma2.shape == self.ma.shape assert ma2.mask.shape == self.ma.shape # Here, array can be reshaped but mask cannot. ma3 = Masked(self.a.copy(), mask=np.broadcast_to([[True], [False]], self.mask_a.shape)) with pytest.raises(AttributeError, match='ncompatible shape'): ma3.shape = 6, assert ma3.shape == self.ma.shape assert ma3.mask.shape == self.ma.shape def test_ravel(self): ma_ravel = self.ma.ravel() expected_data = self.a.ravel() expected_mask = self.mask_a.ravel() assert ma_ravel.shape == expected_data.shape assert_array_equal(ma_ravel.unmasked, expected_data) assert_array_equal(ma_ravel.mask, expected_mask) def test_transpose(self): ma_transpose = self.ma.transpose() expected_data = self.a.transpose() expected_mask = self.mask_a.transpose() assert ma_transpose.shape == expected_data.shape assert_array_equal(ma_transpose.unmasked, expected_data) assert_array_equal(ma_transpose.mask, expected_mask) def test_iter(self): for ma, d, m in zip(self.ma, self.a, self.mask_a): assert_array_equal(ma.unmasked, d) assert_array_equal(ma.mask, m) class MaskedItemTests(MaskedArraySetup): @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_getitem(self, item): ma_part = self.ma[item] expected_data = self.a[item] expected_mask = self.mask_a[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_getitem_structured(self, item): ma_part = self.msa[item] expected_data = self.sa[item] expected_mask = self.mask_sa[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('indices,axis', [ ([0, 1], 1), ([0, 1], 0), ([0, 1], None), ([[0, 1], [2, 3]], None)]) def test_take(self, indices, axis): ma_take = self.ma.take(indices, axis=axis) expected_data = self.a.take(indices, axis=axis) expected_mask = self.mask_a.take(indices, axis=axis) assert_array_equal(ma_take.unmasked, expected_data) assert_array_equal(ma_take.mask, expected_mask) ma_take2 = np.take(self.ma, indices, axis=axis) assert_masked_equal(ma_take2, ma_take) @pytest.mark.parametrize('item', VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem(self, item, mask): base = self.ma.copy() expected_data = self.a.copy() expected_mask = self.mask_a.copy() value = self.a[0, 0] if mask is None else Masked(self.a[0, 0], mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem_structured(self, item, mask): base = self.msa.copy() expected_data = self.sa.copy() expected_mask = self.mask_sa.copy() value = self.sa['b'] if item == 'a' else self.sa[0, 0] if mask is not None: value = Masked(value, mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_setitem_np_ma_masked(self, item): base = self.ma.copy() expected_mask = self.mask_a.copy() 
base[item] = np.ma.masked expected_mask[item] = True assert_array_equal(base.unmasked, self.a) assert_array_equal(base.mask, expected_mask) class TestMaskedArrayItems(MaskedItemTests): @classmethod def setup_class(self): super().setup_class() self.d = np.array(['aa', 'bb']) self.mask_d = np.array([True, False]) self.md = Masked(self.d, self.mask_d) # Quantity, Longitude cannot hold strings. def test_getitem_strings(self): md = self.md.copy() md0 = md[0] assert md0.unmasked == self.d[0] assert md0.mask md_all = md[:] assert_masked_equal(md_all, md) def test_setitem_strings_np_ma_masked(self): md = self.md.copy() md[1] = np.ma.masked assert_array_equal(md.unmasked, self.d) assert_array_equal(md.mask, np.ones(2, bool)) class TestMaskedQuantityItems(MaskedItemTests, QuantitySetup): pass class TestMaskedLongitudeItems(MaskedItemTests, LongitudeSetup): pass class MaskedOperatorTests(MaskedArraySetup): @pytest.mark.parametrize('op', (operator.add, operator.sub)) def test_add_subtract(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that, e.g., # Longitude decays into an Angle. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_not_implemented(self): with pytest.raises(TypeError): self.ma > 'abc' @pytest.mark.parametrize('different_names', [False, True]) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_structured_equality(self, op, different_names): msb = self.msb if different_names: msb = msb.astype([(f'different_{name}', dt) for name, dt in msb.dtype.fields.items()]) mapmb = op(self.msa, self.msb) # Expected is a bit tricky here: only unmasked fields count expected_data = np.ones(mapmb.shape, bool) expected_mask = np.ones(mapmb.shape, bool) for field in self.sdt.names: fa, mfa = self.sa[field], self.mask_sa[field] fb, mfb = self.sb[field], self.mask_sb[field] mfequal = mfa | mfb fequal = (fa == fb) | mfequal expected_data &= fequal expected_mask &= mfequal if op is operator.ne: expected_data = ~expected_data # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. 
assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_matmul(self): result = self.ma.T @ self.ma assert_array_equal(result.unmasked, self.a.T @ self.a) mask1 = np.any(self.mask_a, axis=0) expected_mask = np.logical_or.outer(mask1, mask1) assert_array_equal(result.mask, expected_mask) result2 = self.ma.T @ self.a assert_array_equal(result2.unmasked, self.a.T @ self.a) expected_mask2 = np.logical_or.outer(mask1, np.zeros(3, bool)) assert_array_equal(result2.mask, expected_mask2) result3 = self.a.T @ self.ma assert_array_equal(result3.unmasked, self.a.T @ self.a) expected_mask3 = np.logical_or.outer(np.zeros(3, bool), mask1) assert_array_equal(result3.mask, expected_mask3) def test_matvec(self): result = self.ma @ self.mb assert np.all(result.mask) assert_array_equal(result.unmasked, self.a @ self.b) # Just using the masked vector still has all elements masked. result2 = self.a @ self.mb assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.a @ self.b) new_ma = self.ma.copy() new_ma.mask[0, 0] = False result3 = new_ma @ self.b assert_array_equal(result3.unmasked, self.a @ self.b) assert_array_equal(result3.mask, new_ma.mask.any(-1)) def test_vecmat(self): result = self.mb @ self.ma.T assert np.all(result.mask) assert_array_equal(result.unmasked, self.b @ self.a.T) result2 = self.b @ self.ma.T assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.b @ self.a.T) new_ma = self.ma.T.copy() new_ma.mask[0, 0] = False result3 = self.b @ new_ma assert_array_equal(result3.unmasked, self.b @ self.a.T) assert_array_equal(result3.mask, new_ma.mask.any(0)) def test_vecvec(self): result = self.mb @ self.mb assert result.shape == () assert result.mask assert result.unmasked == self.b @ self.b mb_no_mask = Masked(self.b, False) result2 = mb_no_mask @ mb_no_mask assert not result2.mask class TestMaskedArrayOperators(MaskedOperatorTests): # Some further tests that use strings, which are not useful for Quantity. @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality_strings(self, op): m1 = Masked(np.array(['a', 'b', 'c']), mask=[True, False, False]) m2 = Masked(np.array(['a', 'b', 'd']), mask=[False, False, False]) result = op(m1, m2) assert_array_equal(result.unmasked, op(m1.unmasked, m2.unmasked)) assert_array_equal(result.mask, m1.mask | m2.mask) result2 = op(m1, m2.unmasked) assert_masked_equal(result2, result) def test_not_implemented(self): with pytest.raises(TypeError): Masked(['a', 'b']) > object() class TestMaskedQuantityOperators(MaskedOperatorTests, QuantitySetup): pass class TestMaskedLongitudeOperators(MaskedOperatorTests, LongitudeSetup): pass class TestMaskedArrayMethods(MaskedArraySetup): def test_round(self): # Goes via ufunc, hence easy. 
mrc = self.mc.round() expected = Masked(self.c.round(), self.mask_c) assert_masked_equal(mrc, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_sum(self, axis): ma_sum = self.ma.sum(axis) expected_data = self.a.sum(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumsum(self, axis): ma_sum = self.ma.cumsum(axis) expected_data = self.a.cumsum(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_mean(self, axis): ma_mean = self.ma.mean(axis) filled = self.a.copy() filled[self.mask_a] = 0. count = 1 - self.ma.mask.astype(int) expected_data = filled.sum(axis) / count.sum(axis) expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_mean.unmasked, expected_data) assert_array_equal(ma_mean.mask, expected_mask) def test_mean_int16(self): ma = self.ma.astype('i2') ma_mean = ma.mean() assert ma_mean.dtype == 'f8' expected = ma.astype('f8').mean() assert_masked_equal(ma_mean, expected) def test_mean_float16(self): ma = self.ma.astype('f2') ma_mean = ma.mean() assert ma_mean.dtype == 'f2' expected = self.ma.mean().astype('f2') assert_masked_equal(ma_mean, expected) def test_mean_inplace(self): expected = self.ma.mean(1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.mean(1, out=out) assert result is out assert_masked_equal(out, expected) @pytest.mark.filterwarnings("ignore:.*true_divide.*") @pytest.mark.parametrize('axis', (0, 1, None)) def test_var(self, axis): ma_var = self.ma.var(axis) filled = (self.a - self.ma.mean(axis, keepdims=True))**2 filled[self.mask_a] = 0. 
count = (1 - self.ma.mask.astype(int)).sum(axis) expected_data = filled.sum(axis) / count expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_var.unmasked, expected_data) assert_array_equal(ma_var.mask, expected_mask) ma_var1 = self.ma.var(axis, ddof=1) expected_data1 = filled.sum(axis) / (count - 1) expected_mask1 = self.ma.mask.all(axis) | (count <= 1) assert_array_equal(ma_var1.unmasked, expected_data1) assert_array_equal(ma_var1.mask, expected_mask1) ma_var5 = self.ma.var(axis, ddof=5) assert np.all(~np.isfinite(ma_var5.unmasked)) assert ma_var5.mask.all() def test_var_int16(self): ma = self.ma.astype('i2') ma_var = ma.var() assert ma_var.dtype == 'f8' expected = ma.astype('f8').var() assert_masked_equal(ma_var, expected) def test_std(self): ma_std = self.ma.std(1, ddof=1) ma_var1 = self.ma.var(1, ddof=1) expected = np.sqrt(ma_var1) assert_masked_equal(ma_std, expected) def test_std_inplace(self): expected = self.ma.std(1, ddof=1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.std(1, ddof=1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_min(self, axis): ma_min = self.ma.min(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.min(axis) assert_array_equal(ma_min.unmasked, expected_data) assert not np.any(ma_min.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_max(self, axis): ma_max = self.ma.max(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.max(axis) assert_array_equal(ma_max.unmasked, expected_data) assert not np.any(ma_max.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmin(self, axis): ma_argmin = self.ma.argmin(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.argmin(axis) assert_array_equal(ma_argmin, expected_data) def test_argmin_only_one_unmasked_element(self): # Regression test for example from @taldcroft at # https://github.com/astropy/astropy/pull/11127#discussion_r600864559 ma = Masked(data=[1, 2], mask=[True, False]) assert ma.argmin() == 1 @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmax(self, axis): ma_argmax = self.ma.argmax(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.argmax(axis) assert_array_equal(ma_argmax, expected_data) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argsort(self, axis): ma_argsort = self.ma.argsort(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() * 1.1 expected_data = filled.argsort(axis) assert_array_equal(ma_argsort, expected_data) @pytest.mark.parametrize('order', [None, 'a', ('a', 'b'), ('b', 'a')]) @pytest.mark.parametrize('axis', [0, 1]) def test_structured_argsort(self, axis, order): ma_argsort = self.msa.argsort(axis, order=order) filled = self.msa.filled(fill_value=np.array((np.inf, np.inf), dtype=self.sdt)) expected_data = filled.argsort(axis, order=order) assert_array_equal(ma_argsort, expected_data) def test_argsort_error(self): with pytest.raises(ValueError, match='when the array has no fields'): self.ma.argsort(axis=0, order='a') @pytest.mark.parametrize('axis', (0, 1)) def test_sort(self, axis): ma_sort = self.ma.copy() ma_sort.sort(axis) indices = self.ma.argsort(axis) expected_data = np.take_along_axis(self.ma.unmasked, indices, axis) expected_mask = np.take_along_axis(self.ma.mask, indices, axis) assert_array_equal(ma_sort.unmasked, expected_data) assert_array_equal(ma_sort.mask, expected_mask) 
    @pytest.mark.parametrize('kth', [1, 3])
    def test_argpartition(self, kth):
        ma = self.ma.ravel()
        ma_argpartition = ma.argpartition(kth)
        partitioned = ma[ma_argpartition]
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    @pytest.mark.parametrize('kth', [1, 3])
    def test_partition(self, kth):
        partitioned = self.ma.flatten()
        partitioned.partition(kth)
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    def test_all_explicit(self):
        a1 = np.array([[1., 2.],
                       [3., 4.]])
        a2 = np.array([[1., 0.],
                       [3., 4.]])
        if self._data_cls is not np.ndarray:
            a1 = self._data_cls(a1, self.a.unit)
            a2 = self._data_cls(a2, self.a.unit)
        ma1 = Masked(a1, mask=[[False, False],
                               [True, True]])
        ma2 = Masked(a2, mask=[[False, True],
                               [False, True]])
        ma1_eq_ma2 = ma1 == ma2
        assert_array_equal(ma1_eq_ma2.unmasked, np.array([[True, False],
                                                          [True, True]]))
        assert_array_equal(ma1_eq_ma2.mask, np.array([[False, True],
                                                      [True, True]]))
        assert ma1_eq_ma2.all()
        assert not (ma1 != ma2).all()
        ma_eq1 = ma1_eq_ma2.all(1)
        assert_array_equal(ma_eq1.mask, np.array([False, True]))
        assert bool(ma_eq1[0]) is True
        assert bool(ma_eq1[1]) is False
        ma_eq0 = ma1_eq_ma2.all(0)
        assert_array_equal(ma_eq0.mask, np.array([False, True]))
        assert bool(ma_eq0[0]) is True
        assert bool(ma_eq0[1]) is False

    @pytest.mark.parametrize('method', ['any', 'all'])
    @pytest.mark.parametrize('array,axis', [
        ('a', 0), ('a', 1), ('a', None),
        ('b', None),
        ('c', 0), ('c', 1), ('c', None)])
    def test_all_and_any(self, array, axis, method):
        ma = getattr(self, 'm'+array)
        ma_eq = ma == ma
        ma_all_or_any = getattr(ma_eq, method)(axis=axis)
        filled = ma_eq.unmasked.copy()
        filled[ma_eq.mask] = method == 'all'
        a_all_or_any = getattr(filled, method)(axis=axis)
        all_masked = ma.mask.all(axis)
        assert_array_equal(ma_all_or_any.mask, all_masked)
        assert_array_equal(ma_all_or_any.unmasked, a_all_or_any)
        # Interpretation as bool.
        as_bool = [bool(a) for a in ma_all_or_any.ravel()]
        expected = [bool(a) for a in (a_all_or_any & ~all_masked).ravel()]
        assert as_bool == expected

    def test_any_inplace(self):
        ma_eq = self.ma == self.ma
        expected = ma_eq.any(1)
        out = Masked(np.zeros_like(expected.unmasked))
        result = ma_eq.any(1, out=out)
        assert result is out
        assert_masked_equal(result, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_diagonal(self, offset):
        mda = self.ma.diagonal(offset=offset)
        expected = Masked(self.a.diagonal(offset=offset),
                          self.mask_a.diagonal(offset=offset))
        assert_masked_equal(mda, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_trace(self, offset):
        mta = self.ma.trace(offset=offset)
        expected = Masked(self.a.trace(offset=offset),
                          self.mask_a.trace(offset=offset, dtype=bool))
        assert_masked_equal(mta, expected)

    def test_clip(self):
        maclip = self.ma.clip(self.b, self.c)
        expected = Masked(self.a.clip(self.b, self.c), self.mask_a)
        assert_masked_equal(maclip, expected)

    def test_clip_masked_min_max(self):
        maclip = self.ma.clip(self.mb, self.mc)
        # Need to be careful with min, max because of Longitude, which wraps.
dmax = np.maximum(np.maximum(self.a, self.b), self.c).max() dmin = np.minimum(np.minimum(self.a, self.b), self.c).min() expected = Masked(self.a.clip(self.mb.filled(dmin), self.mc.filled(dmax)), mask=self.mask_a) assert_masked_equal(maclip, expected) class TestMaskedQuantityMethods(TestMaskedArrayMethods, QuantitySetup): pass class TestMaskedLongitudeMethods(TestMaskedArrayMethods, LongitudeSetup): pass class TestMaskedArrayProductMethods(MaskedArraySetup): # These cannot work on Quantity, so done separately @pytest.mark.parametrize('axis', (0, 1, None)) def test_prod(self, axis): ma_sum = self.ma.prod(axis) expected_data = self.a.prod(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumprod(self, axis): ma_sum = self.ma.cumprod(axis) expected_data = self.a.cumprod(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) def test_masked_str_explicit(): sa = np.array([(1., 2.), (3., 4.)], dtype='f8,f8') msa = Masked(sa, [(False, True), (False, False)]) assert str(msa) == "[(1., ——) (3., 4.)]" assert str(msa[0]) == "(1., ——)" assert str(msa[1]) == "(3., 4.)" with np.printoptions(precision=3, floatmode='fixed'): assert str(msa) == "[(1.000, ———) (3.000, 4.000)]" def test_masked_repr_explicit(): # Use explicit endianness to ensure tests pass on all architectures sa = np.array([(1., 2.), (3., 4.)], dtype='>f8,>f8') msa = Masked(sa, [(False, True), (False, False)]) assert repr(msa) == ("MaskedNDArray([(1., ——), (3., 4.)], " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[0]) == ("MaskedNDArray((1., ——), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[1]) == ("MaskedNDArray((3., 4.), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") def test_masked_repr_summary(): ma = Masked(np.arange(15.), mask=[True]+[False]*14) with np.printoptions(threshold=2): assert repr(ma) == ( "MaskedNDArray([———, 1., 2., ..., 12., 13., 14.])") def test_masked_repr_nodata(): assert repr(Masked([])) == "MaskedNDArray([], dtype=float64)" class TestMaskedArrayRepr(MaskedArraySetup): def test_array_str(self): # very blunt check they work at all. 
str(self.ma) str(self.mb) str(self.mc) str(self.msa) str(self.msb) def test_scalar_str(self): assert self.mb[0].shape == () str(self.mb[0]) assert self.msb[0].shape == () str(self.msb[0]) def test_array_repr(self): repr(self.ma) repr(self.mb) repr(self.mc) repr(self.msa) repr(self.msb) def test_scalar_repr(self): repr(self.mb[0]) repr(self.msb[0]) class TestMaskedQuantityRepr(TestMaskedArrayRepr, QuantitySetup): pass class TestMaskedRecarray(MaskedArraySetup): @classmethod def setup_class(self): super().setup_class() self.ra = self.sa.view(np.recarray) self.mra = Masked(self.ra, mask=self.mask_sa) def test_recarray_setup(self): assert isinstance(self.mra, Masked) assert isinstance(self.mra, np.recarray) assert np.all(self.mra.unmasked == self.ra) assert np.all(self.mra.mask == self.mask_sa) assert_array_equal(self.mra.view(np.ndarray), self.sa) assert isinstance(self.mra.a, Masked) assert_array_equal(self.mra.a.unmasked, self.sa['a']) assert_array_equal(self.mra.a.mask, self.mask_sa['a']) def test_recarray_setting(self): mra = self.mra.copy() mra.a = self.msa['b'] assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_getting(self, attr): mra_a = self.mra.field(attr) assert isinstance(mra_a, Masked) assert_array_equal(mra_a.unmasked, self.sa['a']) assert_array_equal(mra_a.mask, self.mask_sa['a']) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_setting(self, attr): mra = self.mra.copy() mra.field(attr, self.msa['b']) assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask)
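
# ---------------------------------------------------------------------------
# Editorial sketch (not part of the astropy test module above): a compact,
# self-contained demonstration of the mask-propagation semantics the tests
# verify -- binary operations OR the operand masks, and reductions such as
# ``mean`` skip masked elements.  It assumes only the public Masked API
# exercised above; the ``_x``/``_y``/``_z`` names are invented here.
import numpy as np
from astropy.utils.masked import Masked

_x = Masked(np.array([1., 2., 4.]), mask=[False, True, False])
_y = Masked(np.array([10., 20., 30.]), mask=[False, False, True])
_z = _x + _y
assert np.all(_z.mask == np.array([False, True, True]))  # masks combine with |
assert _z.unmasked[1] == 22.      # the data is still computed under the mask
assert _x.mean().unmasked == 2.5  # only unmasked elements enter: (1 + 4) / 2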
pllim/astropy
astropy/utils/masked/tests/test_masked.py
astropy/io/registry.py
import os
import abc

import numpy as np

__all__ = ['BaseLowLevelWCS', 'validate_physical_types']


class BaseLowLevelWCS(metaclass=abc.ABCMeta):
    """
    Abstract base class for the low-level WCS interface.

    This is described in `APE 14: A shared Python interface for World
    Coordinate Systems <https://doi.org/10.5281/zenodo.1188875>`_.
    """

    @property
    @abc.abstractmethod
    def pixel_n_dim(self):
        """
        The number of axes in the pixel coordinate system.
        """

    @property
    @abc.abstractmethod
    def world_n_dim(self):
        """
        The number of axes in the world coordinate system.
        """

    @property
    @abc.abstractmethod
    def world_axis_physical_types(self):
        """
        An iterable of strings describing the physical type for each world
        axis.

        These should be names from the VO UCD1+ controlled Vocabulary
        (http://www.ivoa.net/documents/latest/UCDlist.html). If no matching
        UCD type exists, this can instead be ``"custom:xxx"``, where ``xxx``
        is an arbitrary string. Alternatively, if the physical type is
        unknown/undefined, an element can be `None`.
        """

    @property
    @abc.abstractmethod
    def world_axis_units(self):
        """
        An iterable of strings giving the units of the world coordinates for
        each axis.

        The strings should follow the `IVOA VOUnit standard
        <http://ivoa.net/documents/VOUnits/>`_ (though as noted in the VOUnit
        specification document, units that do not follow this standard are
        still allowed, but just not recommended).
        """

    @abc.abstractmethod
    def pixel_to_world_values(self, *pixel_arrays):
        """
        Convert pixel coordinates to world coordinates.

        This method takes `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim`
        scalars or arrays as input, and pixel coordinates should be
        zero-based. Returns `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim`
        scalars or arrays in units given by
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_units`. Note that
        pixel coordinates are assumed to be 0 at the center of the first
        pixel in each dimension. If a pixel is in a region where the WCS is
        not defined, NaN can be returned. The coordinates should be specified
        in the ``(x, y)`` order, where for an image, ``x`` is the horizontal
        coordinate and ``y`` is the vertical coordinate.

        If `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` is ``1``, this
        method returns a single scalar or array, otherwise a tuple of scalars
        or arrays is returned.
        """

    def array_index_to_world_values(self, *index_arrays):
        """
        Convert array indices to world coordinates.

        This is the same as
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_to_world_values` except
        that the indices should be given in ``(i, j)`` order, where for an
        image ``i`` is the row and ``j`` is the column (i.e. the opposite
        order to `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_to_world_values`).

        If `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim` is ``1``, this
        method returns a single scalar or array, otherwise a tuple of scalars
        or arrays is returned.
        """
        return self.pixel_to_world_values(*index_arrays[::-1])

    @abc.abstractmethod
    def world_to_pixel_values(self, *world_arrays):
        """
        Convert world coordinates to pixel coordinates.

        This method takes `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim`
        scalars or arrays as input in units given by
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_units`. Returns
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` scalars or arrays.
        Note that pixel coordinates are assumed to be 0 at the center of the
        first pixel in each dimension. If a world coordinate does not have a
        matching pixel coordinate, NaN can be returned.
        The coordinates should be returned in the ``(x, y)`` order, where for
        an image, ``x`` is the horizontal coordinate and ``y`` is the
        vertical coordinate.

        If `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` is ``1``, this
        method returns a single scalar or array, otherwise a tuple of scalars
        or arrays is returned.
        """

    def world_to_array_index_values(self, *world_arrays):
        """
        Convert world coordinates to array indices.

        This is the same as
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_to_pixel_values` except
        that the indices should be returned in ``(i, j)`` order, where for an
        image ``i`` is the row and ``j`` is the column (i.e. the opposite
        order to `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_to_world_values`).
        The indices should be returned as rounded integers.

        If `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` is ``1``, this
        method returns a single scalar or array, otherwise a tuple of scalars
        or arrays is returned.
        """
        pixel_arrays = self.world_to_pixel_values(*world_arrays)
        if self.pixel_n_dim == 1:
            pixel_arrays = (pixel_arrays,)
        else:
            pixel_arrays = pixel_arrays[::-1]
        array_indices = tuple(np.asarray(np.floor(pixel + 0.5), dtype=np.int_)
                              for pixel in pixel_arrays)
        return array_indices[0] if self.pixel_n_dim == 1 else array_indices

    @property
    @abc.abstractmethod
    def world_axis_object_components(self):
        """
        A list with `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim`
        elements giving information on constructing high-level objects for
        the world coordinates.

        Each element of the list is a tuple with three items:

        * The first is a name for the world object this world array
          corresponds to, which *must* match the string names used in
          `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_classes`.
          Note that names might appear twice because two world arrays might
          correspond to a single world object (e.g. a celestial coordinate
          might have both "ra" and "dec" arrays, which correspond to a single
          sky coordinate object).

        * The second element is either a string keyword argument name or a
          positional index for the corresponding class from
          `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_classes`.

        * The third element is a string giving the name of the property to
          access on the corresponding class from
          `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_classes` in
          order to get numerical values. Alternatively, this element can be a
          callable Python object that takes a high-level coordinate object
          and returns the numerical values suitable for passing to the
          low-level WCS transformation methods.

        See the document
        `APE 14: A shared Python interface for World Coordinate Systems
        <https://doi.org/10.5281/zenodo.1188875>`_ for examples.
        """

    @property
    @abc.abstractmethod
    def world_axis_object_classes(self):
        """
        A dictionary giving information on constructing high-level objects
        for the world coordinates.

        Each key of the dictionary is a string key from
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_components`,
        and each value is a tuple with three or four elements:

        * The first element of the tuple must be a class or a string
          specifying the fully-qualified name of a class, which will specify
          the actual Python object to be created.

        * The second element should be a tuple specifying the positional
          arguments required to initialize the class. If
          `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_object_components`
          specifies that the world coordinates should be passed as a
          positional argument, this tuple should include `None` placeholders
          for the world coordinates.
        * The third tuple element must be a dictionary with the keyword
          arguments required to initialize the class.

        * Optionally, for advanced use cases, the fourth element (if present)
          should be a callable Python object that gets called instead of the
          class and gets passed the positional and keyword arguments. It
          should return an object of the type of the first element in the
          tuple.

        Note that we don't require the classes to be Astropy classes since
        there is no guarantee that Astropy will have all the classes to
        represent all kinds of world coordinates. Furthermore, we recommend
        that the output be kept as human-readable as possible.

        The classes used here should have the ability to do conversions by
        passing an instance as the first argument to the same class with
        different arguments (e.g. ``Time(Time(...), scale='tai')``). This is
        a requirement for the implementation of the high-level interface.

        The second and third tuple elements for each value of this dictionary
        can in turn contain either instances of classes, or if necessary can
        contain serialized versions that should take the same form as the
        main classes described above (a tuple with three elements with the
        fully qualified name of the class, then the positional arguments and
        the keyword arguments). For low-level API objects implemented in
        Python, we recommend simply returning the actual objects (not the
        serialized form) for optimal performance. Implementations should
        either always or never use serialized classes to represent Python
        objects, and should indicate which of these they follow using the
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.serialized_classes` attribute.

        See the document
        `APE 14: A shared Python interface for World Coordinate Systems
        <https://doi.org/10.5281/zenodo.1188875>`_ for examples.
        """

    # The following three properties have default fallback implementations,
    # so they are not abstract.

    @property
    def array_shape(self):
        """
        The shape of the data that the WCS applies to as a tuple of length
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` in ``(row, column)``
        order (the convention for arrays in Python).

        If the WCS is valid in the context of a dataset with a particular
        shape, then this property can be used to store the shape of the data.
        This can be used for example if implementing slicing of WCS objects.

        This is an optional property, and it should return `None` if a shape
        is not known or relevant.
        """
        if self.pixel_shape is None:
            return None
        else:
            return self.pixel_shape[::-1]

    @property
    def pixel_shape(self):
        """
        The shape of the data that the WCS applies to as a tuple of length
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim` in ``(x, y)`` order
        (where for an image, ``x`` is the horizontal coordinate and ``y`` is
        the vertical coordinate).

        If the WCS is valid in the context of a dataset with a particular
        shape, then this property can be used to store the shape of the data.
        This can be used for example if implementing slicing of WCS objects.

        This is an optional property, and it should return `None` if a shape
        is not known or relevant.

        If you are interested in getting a shape that is comparable to that
        of a Numpy array, you should use
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.array_shape` instead.
        """
        return None

    @property
    def pixel_bounds(self):
        """
        The bounds (in pixel coordinates) inside which the WCS is defined, as
        a list with `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim`
        ``(min, max)`` tuples.

        The bounds should be given in ``[(xmin, xmax), (ymin, ymax)]`` order.
        WCS solutions are sometimes only guaranteed to be accurate within a
        certain range of pixel values, for example when defining a WCS that
        includes fitted distortions. This is an optional property, and it
        should return `None` if the bounds are not known or relevant.
        """
        return None

    @property
    def pixel_axis_names(self):
        """
        An iterable of strings describing the name for each pixel axis.

        If an axis does not have a name, an empty string should be returned
        (this is the default behavior for all axes if a subclass does not
        override this property). Note that these names are just for display
        purposes and are not standardized.
        """
        return [''] * self.pixel_n_dim

    @property
    def world_axis_names(self):
        """
        An iterable of strings describing the name for each world axis.

        If an axis does not have a name, an empty string should be returned
        (this is the default behavior for all axes if a subclass does not
        override this property). Note that these names are just for display
        purposes and are not standardized. For standardized axis types, see
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.world_axis_physical_types`.
        """
        return [''] * self.world_n_dim

    @property
    def axis_correlation_matrix(self):
        """
        Returns an (`~astropy.wcs.wcsapi.BaseLowLevelWCS.world_n_dim`,
        `~astropy.wcs.wcsapi.BaseLowLevelWCS.pixel_n_dim`) matrix that
        indicates using booleans whether a given world coordinate depends on
        a given pixel coordinate.

        This defaults to a matrix where all elements are `True` in the
        absence of any further information. For completely independent axes,
        the diagonal would be `True` and all other entries `False`.
        """
        return np.ones((self.world_n_dim, self.pixel_n_dim), dtype=bool)

    @property
    def serialized_classes(self):
        """
        Indicates whether Python objects are given in serialized form or as
        actual Python objects.
        """
        return False

    def _as_mpl_axes(self):
        """
        Compatibility hook for Matplotlib and WCSAxes. With this method, one
        can do::

            from astropy.wcs import WCS
            import matplotlib.pyplot as plt

            wcs = WCS('filename.fits')
            fig = plt.figure()
            ax = fig.add_axes([0.15, 0.1, 0.8, 0.8], projection=wcs)
            ...

        and this will generate a plot with the correct WCS coordinates on the
        axes.
        """
        from astropy.visualization.wcsaxes import WCSAxes
        return WCSAxes, {'wcs': self}


UCDS_FILE = os.path.join(os.path.dirname(__file__), 'data', 'ucds.txt')
with open(UCDS_FILE) as f:
    VALID_UCDS = {x.strip() for x in f.read().splitlines()[1:]}


def validate_physical_types(physical_types):
    """
    Validate a list of physical types against the UCD1+ standard.
    """
    for physical_type in physical_types:
        if (physical_type is not None
                and physical_type not in VALID_UCDS
                and not physical_type.startswith('custom:')):
            raise ValueError(
                f"'{physical_type}' is not a valid IVOA UCD1+ physical type. "
                "It must be a string specified in the list "
                "(http://www.ivoa.net/documents/latest/UCDlist.html) or if "
                "no matching type exists it can be any string prepended with "
                "'custom:'."
            )
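
# ---------------------------------------------------------------------------
# Illustrative sketch (an editorial addition, not part of the module above):
# a minimal concrete implementation of the abstract interface, for a single
# linear axis with world = crval + cdelt * pixel.  The class name and its
# ``crval``/``cdelt`` parameters are invented for illustration; everything
# else uses only the members defined by ``BaseLowLevelWCS`` above.
class _ExampleLinearWCS(BaseLowLevelWCS):

    def __init__(self, crval=0., cdelt=1.):
        self._crval = crval
        self._cdelt = cdelt

    @property
    def pixel_n_dim(self):
        return 1

    @property
    def world_n_dim(self):
        return 1

    @property
    def world_axis_physical_types(self):
        # No matching UCD1+ type, so use the "custom:" escape hatch.
        return ['custom:linear']

    @property
    def world_axis_units(self):
        # An empty string means a dimensionless axis.
        return ['']

    def pixel_to_world_values(self, *pixel_arrays):
        # world_n_dim == 1, so return a single scalar/array, not a tuple.
        return self._crval + self._cdelt * np.asarray(pixel_arrays[0])

    def world_to_pixel_values(self, *world_arrays):
        return (np.asarray(world_arrays[0]) - self._crval) / self._cdelt

    @property
    def world_axis_object_components(self):
        # The single world axis feeds the 'value' keyword argument of the
        # high-level object, and is read back from its ``value`` attribute.
        return [('world', 'value', 'value')]

    @property
    def world_axis_object_classes(self):
        from astropy.units import Quantity
        return {'world': (Quantity, (), {'unit': ''})}


# Quick self-check of the sketch, including the round-to-nearest behavior of
# the array-index helper inherited from the base class:
_wcs = _ExampleLinearWCS(crval=10., cdelt=2.)
assert _wcs.pixel_to_world_values(3.) == 16.
assert _wcs.world_to_pixel_values(16.) == 3.
assert _wcs.world_to_array_index_values(15.5) == 3  # floor(2.75 + 0.5)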
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Test masked class initialization, methods, and operators. Functions, including ufuncs, are tested in test_functions.py """ import operator import numpy as np from numpy.testing import assert_array_equal import pytest from astropy import units as u from astropy.units import Quantity from astropy.coordinates import Longitude from astropy.utils.masked import Masked, MaskedNDArray def assert_masked_equal(a, b): assert_array_equal(a.unmasked, b.unmasked) assert_array_equal(a.mask, b.mask) VARIOUS_ITEMS = [ (1, 1), slice(None, 1), (), 1] class ArraySetup: _data_cls = np.ndarray @classmethod def setup_class(self): self.a = np.arange(6.).reshape(2, 3) self.mask_a = np.array([[True, False, False], [False, True, False]]) self.b = np.array([-3., -2., -1.]) self.mask_b = np.array([False, True, False]) self.c = np.array([[0.25], [0.5]]) self.mask_c = np.array([[False], [True]]) self.sdt = np.dtype([('a', 'f8'), ('b', 'f8')]) self.mask_sdt = np.dtype([('a', '?'), ('b', '?')]) self.sa = np.array([[(1., 2.), (3., 4.)], [(11., 12.), (13., 14.)]], dtype=self.sdt) self.mask_sa = np.array([[(True, True), (False, False)], [(False, True), (True, False)]], dtype=self.mask_sdt) self.sb = np.array([(1., 2.), (-3., 4.)], dtype=self.sdt) self.mask_sb = np.array([(True, False), (False, False)], dtype=self.mask_sdt) class QuantitySetup(ArraySetup): _data_cls = Quantity @classmethod def setup_class(self): super().setup_class() self.a = Quantity(self.a, u.m) self.b = Quantity(self.b, u.cm) self.c = Quantity(self.c, u.km) self.sa = Quantity(self.sa, u.m, dtype=self.sdt) self.sb = Quantity(self.sb, u.cm, dtype=self.sdt) class LongitudeSetup(ArraySetup): _data_cls = Longitude @classmethod def setup_class(self): super().setup_class() self.a = Longitude(self.a, u.deg) self.b = Longitude(self.b, u.deg) self.c = Longitude(self.c, u.deg) # Note: Longitude does not work on structured arrays, so # leaving it as regular array (which just reruns some tests). class TestMaskedArrayInitialization(ArraySetup): def test_simple(self): ma = Masked(self.a, mask=self.mask_a) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.a)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.a) assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_structured(self): ma = Masked(self.sa, mask=self.mask_sa) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.sa)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.sa) assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) def test_masked_ndarray_init(): # Note: as a straight ndarray subclass, MaskedNDArray passes on # the arguments relevant for np.ndarray, not np.array. a_in = np.arange(3, dtype=int) m_in = np.array([True, False, False]) buff = a_in.tobytes() # Check we're doing things correctly using regular ndarray. a = np.ndarray(shape=(3,), dtype=int, buffer=buff) assert_array_equal(a, a_in) # Check with and without mask. 
ma = MaskedNDArray((3,), dtype=int, mask=m_in, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, m_in) ma = MaskedNDArray((3,), dtype=int, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, np.zeros(3, bool)) def test_cannot_initialize_with_masked(): with pytest.raises(ValueError, match='cannot handle np.ma.masked'): Masked(np.ma.masked) def test_cannot_just_use_anything_with_a_mask_attribute(): class my_array(np.ndarray): mask = True a = np.array([1., 2.]).view(my_array) with pytest.raises(AttributeError, match='unmasked'): Masked(a) class TestMaskedClassCreation: """Try creating a MaskedList and subclasses. By no means meant to be realistic, just to check that the basic machinery allows it. """ @classmethod def setup_class(self): self._base_classes_orig = Masked._base_classes.copy() self._masked_classes_orig = Masked._masked_classes.copy() class MaskedList(Masked, list, base_cls=list, data_cls=list): def __new__(cls, *args, mask=None, copy=False, **kwargs): self = super().__new__(cls) self._unmasked = self._data_cls(*args, **kwargs) self.mask = mask return self # Need to have shape for basics to work. @property def shape(self): return (len(self._unmasked),) self.MaskedList = MaskedList def teardown_class(self): Masked._base_classes = self._base_classes_orig Masked._masked_classes = self._masked_classes_orig def test_setup(self): assert issubclass(self.MaskedList, Masked) assert issubclass(self.MaskedList, list) assert Masked(list) is self.MaskedList def test_masked_list(self): ml = self.MaskedList(range(3), mask=[True, False, False]) assert ml.unmasked == [0, 1, 2] assert_array_equal(ml.mask, np.array([True, False, False])) ml01 = ml[:2] assert ml01.unmasked == [0, 1] assert_array_equal(ml01.mask, np.array([True, False])) def test_from_list(self): ml = Masked([1, 2, 3], mask=[True, False, False]) assert ml.unmasked == [1, 2, 3] assert_array_equal(ml.mask, np.array([True, False, False])) def test_masked_list_subclass(self): class MyList(list): pass ml = MyList(range(3)) mml = Masked(ml, mask=[False, True, False]) assert isinstance(mml, Masked) assert isinstance(mml, MyList) assert isinstance(mml.unmasked, MyList) assert mml.unmasked == [0, 1, 2] assert_array_equal(mml.mask, np.array([False, True, False])) assert Masked(MyList) is type(mml) class TestMaskedNDArraySubclassCreation: """Test that masked subclasses can be created directly and indirectly.""" @classmethod def setup_class(self): class MyArray(np.ndarray): def __new__(cls, *args, **kwargs): return np.asanyarray(*args, **kwargs).view(cls) self.MyArray = MyArray self.a = np.array([1., 2.]).view(self.MyArray) self.m = np.array([True, False], dtype=bool) def teardown_method(self, method): Masked._masked_classes.pop(self.MyArray, None) def test_direct_creation(self): assert self.MyArray not in Masked._masked_classes mcls = Masked(self.MyArray) assert issubclass(mcls, Masked) assert issubclass(mcls, self.MyArray) assert mcls.__name__ == 'MaskedMyArray' assert mcls.__doc__.startswith('Masked version of MyArray') mms = mcls(self.a, mask=self.m) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
mcls = Masked(self.MyArray) mms = mcls(self.a) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, np.zeros(mms.shape, bool)) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): mcls = Masked(self.MyArray) ma = masked_array(np.asarray(self.a), mask=self.m) mms = mcls(ma) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_indirect_creation(self): assert self.MyArray not in Masked._masked_classes mms = Masked(self.a, mask=self.m) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) assert self.MyArray in Masked._masked_classes assert Masked(self.MyArray) is type(mms) def test_can_initialize_with_masked_values(self): mcls = Masked(self.MyArray) mms = mcls(Masked(np.asarray(self.a), mask=self.m)) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_viewing(self): mms = Masked(self.a, mask=self.m) mms2 = mms.view() assert type(mms2) is mms.__class__ assert_masked_equal(mms2, mms) ma = mms.view(np.ndarray) assert type(ma) is MaskedNDArray assert_array_equal(ma.unmasked, self.a.view(np.ndarray)) assert_array_equal(ma.mask, self.m) class TestMaskedQuantityInitialization(TestMaskedArrayInitialization, QuantitySetup): def test_masked_quantity_class_init(self): # TODO: class definitions should be more easily accessible. mcls = Masked._masked_classes[self.a.__class__] # This is not a very careful test. mq = mcls([1., 2.], mask=[True, False], unit=u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.value.mask == [True, False]) assert np.all(mq.mask == [True, False]) def test_masked_quantity_getting(self): mcls = Masked._masked_classes[self.a.__class__] MQ = Masked(Quantity) assert MQ is mcls def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
MQ = Masked(Quantity) mq = MQ([1., 2.], u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.mask == [False, False]) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): MQ = Masked(Quantity) a = np.array([1., 2.]) m = np.array([True, False]) ma = masked_array(a, m) mq = MQ(ma) assert isinstance(mq, Masked) assert isinstance(mq, Quantity) assert_array_equal(mq.value.unmasked, a) assert_array_equal(mq.mask, m) class TestMaskSetting(ArraySetup): def test_whole_mask_setting_simple(self): ma = Masked(self.a) assert ma.mask.shape == ma.shape assert not ma.mask.any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask.all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array([[True] * 3, [False] * 3])) ma.mask = self.mask_a assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_whole_mask_setting_structured(self): ma = Masked(self.sa) assert ma.mask.shape == ma.shape assert not ma.mask['a'].any() and not ma.mask['b'].any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask['a'].all() and ma.mask['b'].all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array( [[(True, True)] * 2, [(False, False)] * 2], dtype=self.mask_sdt)) ma.mask = self.mask_sa assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_part_mask_setting(self, item): ma = Masked(self.a) ma.mask[item] = True expected = np.zeros(ma.shape, bool) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, bool)) # Mask propagation mask = np.zeros(self.a.shape, bool) ma = Masked(self.a, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_part_mask_setting_structured(self, item): ma = Masked(self.sa) ma.mask[item] = True expected = np.zeros(ma.shape, self.mask_sdt) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, self.mask_sdt)) # Mask propagation mask = np.zeros(self.sa.shape, self.mask_sdt) ma = Masked(self.sa, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) # Following are tests where we trust the initializer works. class MaskedArraySetup(ArraySetup): @classmethod def setup_class(self): super().setup_class() self.ma = Masked(self.a, mask=self.mask_a) self.mb = Masked(self.b, mask=self.mask_b) self.mc = Masked(self.c, mask=self.mask_c) self.msa = Masked(self.sa, mask=self.mask_sa) self.msb = Masked(self.sb, mask=self.mask_sb) class TestViewing(MaskedArraySetup): def test_viewing_as_new_type(self): ma2 = self.ma.view(type(self.ma)) assert_masked_equal(ma2, self.ma) ma3 = self.ma.view() assert_masked_equal(ma3, self.ma) def test_viewing_as_new_dtype(self): # Not very meaningful, but possible... 
        ma2 = self.ma.view('c8')
        assert_array_equal(ma2.unmasked, self.a.view('c8'))
        assert_array_equal(ma2.mask, self.mask_a)

    @pytest.mark.parametrize('new_dtype', ['2f4', 'f8,f8,f8'])
    def test_viewing_as_new_dtype_not_implemented(self, new_dtype):
        # But cannot (yet) view in a way that would need to create a new mask,
        # even though that view is possible for a regular array.
        check = self.a.view(new_dtype)
        with pytest.raises(NotImplementedError, match='different.*size'):
            self.ma.view(check.dtype)

    def test_viewing_as_something_impossible(self):
        with pytest.raises(TypeError):
            # Use intp to ensure we have the same size as object,
            # otherwise we get a different error message.
            Masked(np.array([1, 2], dtype=np.intp)).view(Masked)


class TestMaskedArrayCopyFilled(MaskedArraySetup):
    def test_copy(self):
        ma_copy = self.ma.copy()
        assert type(ma_copy) is type(self.ma)
        assert_array_equal(ma_copy.unmasked, self.ma.unmasked)
        assert_array_equal(ma_copy.mask, self.ma.mask)
        assert not np.may_share_memory(ma_copy.unmasked, self.ma.unmasked)
        assert not np.may_share_memory(ma_copy.mask, self.ma.mask)

    @pytest.mark.parametrize('fill_value', (0, 1))
    def test_filled(self, fill_value):
        fill_value = fill_value * getattr(self.a, 'unit', 1)
        expected = self.a.copy()
        expected[self.ma.mask] = fill_value
        result = self.ma.filled(fill_value)
        assert_array_equal(expected, result)

    def test_filled_no_fill_value(self):
        with pytest.raises(TypeError, match='missing 1 required'):
            self.ma.filled()

    @pytest.mark.parametrize('fill_value', [(0, 1), (-1, -1)])
    def test_filled_structured(self, fill_value):
        fill_value = np.array(fill_value, dtype=self.sdt)
        if hasattr(self.sa, 'unit'):
            fill_value = fill_value << self.sa.unit
        expected = self.sa.copy()
        expected['a'][self.msa.mask['a']] = fill_value['a']
        expected['b'][self.msa.mask['b']] = fill_value['b']
        result = self.msa.filled(fill_value)
        assert_array_equal(expected, result)

    def test_flat(self):
        ma_copy = self.ma.copy()
        ma_flat = ma_copy.flat
        # Check that a single item keeps class and mask.
        ma_flat1 = ma_flat[1]
        assert ma_flat1.unmasked == self.a.flat[1]
        assert ma_flat1.mask == self.mask_a.flat[1]
        # As well as getting items via iteration.
        assert all((ma.unmasked == a and ma.mask == m) for (ma, a, m)
                   in zip(self.ma.flat, self.a.flat, self.mask_a.flat))

        # Check that flat works like a view of the real array.
        ma_flat[1] = self.b[1]
        assert ma_flat[1] == self.b[1]
        assert ma_copy[0, 1] == self.b[1]


class TestMaskedQuantityCopyFilled(TestMaskedArrayCopyFilled, QuantitySetup):
    pass


class TestMaskedLongitudeCopyFilled(TestMaskedArrayCopyFilled, LongitudeSetup):
    pass


class TestMaskedArrayShaping(MaskedArraySetup):
    def test_reshape(self):
        ma_reshape = self.ma.reshape((6,))
        expected_data = self.a.reshape((6,))
        expected_mask = self.mask_a.reshape((6,))
        assert ma_reshape.shape == expected_data.shape
        assert_array_equal(ma_reshape.unmasked, expected_data)
        assert_array_equal(ma_reshape.mask, expected_mask)

    def test_shape_setting(self):
        ma_reshape = self.ma.copy()
        ma_reshape.shape = 6,
        expected_data = self.a.reshape((6,))
        expected_mask = self.mask_a.reshape((6,))
        assert ma_reshape.shape == expected_data.shape
        assert_array_equal(ma_reshape.unmasked, expected_data)
        assert_array_equal(ma_reshape.mask, expected_mask)

    def test_shape_setting_failure(self):
        ma = self.ma.copy()
        with pytest.raises(ValueError, match='cannot reshape'):
            ma.shape = 5,

        assert ma.shape == self.ma.shape
        assert ma.mask.shape == self.ma.shape

        # Here, mask can be reshaped but array cannot.
ma2 = Masked(np.broadcast_to([[1.], [2.]], self.a.shape), mask=self.mask_a) with pytest.raises(AttributeError, match='ncompatible shape'): ma2.shape = 6, assert ma2.shape == self.ma.shape assert ma2.mask.shape == self.ma.shape # Here, array can be reshaped but mask cannot. ma3 = Masked(self.a.copy(), mask=np.broadcast_to([[True], [False]], self.mask_a.shape)) with pytest.raises(AttributeError, match='ncompatible shape'): ma3.shape = 6, assert ma3.shape == self.ma.shape assert ma3.mask.shape == self.ma.shape def test_ravel(self): ma_ravel = self.ma.ravel() expected_data = self.a.ravel() expected_mask = self.mask_a.ravel() assert ma_ravel.shape == expected_data.shape assert_array_equal(ma_ravel.unmasked, expected_data) assert_array_equal(ma_ravel.mask, expected_mask) def test_transpose(self): ma_transpose = self.ma.transpose() expected_data = self.a.transpose() expected_mask = self.mask_a.transpose() assert ma_transpose.shape == expected_data.shape assert_array_equal(ma_transpose.unmasked, expected_data) assert_array_equal(ma_transpose.mask, expected_mask) def test_iter(self): for ma, d, m in zip(self.ma, self.a, self.mask_a): assert_array_equal(ma.unmasked, d) assert_array_equal(ma.mask, m) class MaskedItemTests(MaskedArraySetup): @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_getitem(self, item): ma_part = self.ma[item] expected_data = self.a[item] expected_mask = self.mask_a[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_getitem_structured(self, item): ma_part = self.msa[item] expected_data = self.sa[item] expected_mask = self.mask_sa[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('indices,axis', [ ([0, 1], 1), ([0, 1], 0), ([0, 1], None), ([[0, 1], [2, 3]], None)]) def test_take(self, indices, axis): ma_take = self.ma.take(indices, axis=axis) expected_data = self.a.take(indices, axis=axis) expected_mask = self.mask_a.take(indices, axis=axis) assert_array_equal(ma_take.unmasked, expected_data) assert_array_equal(ma_take.mask, expected_mask) ma_take2 = np.take(self.ma, indices, axis=axis) assert_masked_equal(ma_take2, ma_take) @pytest.mark.parametrize('item', VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem(self, item, mask): base = self.ma.copy() expected_data = self.a.copy() expected_mask = self.mask_a.copy() value = self.a[0, 0] if mask is None else Masked(self.a[0, 0], mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem_structured(self, item, mask): base = self.msa.copy() expected_data = self.sa.copy() expected_mask = self.mask_sa.copy() value = self.sa['b'] if item == 'a' else self.sa[0, 0] if mask is not None: value = Masked(value, mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_setitem_np_ma_masked(self, item): base = self.ma.copy() expected_mask = self.mask_a.copy() 
base[item] = np.ma.masked expected_mask[item] = True assert_array_equal(base.unmasked, self.a) assert_array_equal(base.mask, expected_mask) class TestMaskedArrayItems(MaskedItemTests): @classmethod def setup_class(self): super().setup_class() self.d = np.array(['aa', 'bb']) self.mask_d = np.array([True, False]) self.md = Masked(self.d, self.mask_d) # Quantity, Longitude cannot hold strings. def test_getitem_strings(self): md = self.md.copy() md0 = md[0] assert md0.unmasked == self.d[0] assert md0.mask md_all = md[:] assert_masked_equal(md_all, md) def test_setitem_strings_np_ma_masked(self): md = self.md.copy() md[1] = np.ma.masked assert_array_equal(md.unmasked, self.d) assert_array_equal(md.mask, np.ones(2, bool)) class TestMaskedQuantityItems(MaskedItemTests, QuantitySetup): pass class TestMaskedLongitudeItems(MaskedItemTests, LongitudeSetup): pass class MaskedOperatorTests(MaskedArraySetup): @pytest.mark.parametrize('op', (operator.add, operator.sub)) def test_add_subtract(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that, e.g., # Longitude decays into an Angle. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_not_implemented(self): with pytest.raises(TypeError): self.ma > 'abc' @pytest.mark.parametrize('different_names', [False, True]) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_structured_equality(self, op, different_names): msb = self.msb if different_names: msb = msb.astype([(f'different_{name}', dt) for name, dt in msb.dtype.fields.items()]) mapmb = op(self.msa, self.msb) # Expected is a bit tricky here: only unmasked fields count expected_data = np.ones(mapmb.shape, bool) expected_mask = np.ones(mapmb.shape, bool) for field in self.sdt.names: fa, mfa = self.sa[field], self.mask_sa[field] fb, mfb = self.sb[field], self.mask_sb[field] mfequal = mfa | mfb fequal = (fa == fb) | mfequal expected_data &= fequal expected_mask &= mfequal if op is operator.ne: expected_data = ~expected_data # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. 
assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_matmul(self): result = self.ma.T @ self.ma assert_array_equal(result.unmasked, self.a.T @ self.a) mask1 = np.any(self.mask_a, axis=0) expected_mask = np.logical_or.outer(mask1, mask1) assert_array_equal(result.mask, expected_mask) result2 = self.ma.T @ self.a assert_array_equal(result2.unmasked, self.a.T @ self.a) expected_mask2 = np.logical_or.outer(mask1, np.zeros(3, bool)) assert_array_equal(result2.mask, expected_mask2) result3 = self.a.T @ self.ma assert_array_equal(result3.unmasked, self.a.T @ self.a) expected_mask3 = np.logical_or.outer(np.zeros(3, bool), mask1) assert_array_equal(result3.mask, expected_mask3) def test_matvec(self): result = self.ma @ self.mb assert np.all(result.mask) assert_array_equal(result.unmasked, self.a @ self.b) # Just using the masked vector still has all elements masked. result2 = self.a @ self.mb assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.a @ self.b) new_ma = self.ma.copy() new_ma.mask[0, 0] = False result3 = new_ma @ self.b assert_array_equal(result3.unmasked, self.a @ self.b) assert_array_equal(result3.mask, new_ma.mask.any(-1)) def test_vecmat(self): result = self.mb @ self.ma.T assert np.all(result.mask) assert_array_equal(result.unmasked, self.b @ self.a.T) result2 = self.b @ self.ma.T assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.b @ self.a.T) new_ma = self.ma.T.copy() new_ma.mask[0, 0] = False result3 = self.b @ new_ma assert_array_equal(result3.unmasked, self.b @ self.a.T) assert_array_equal(result3.mask, new_ma.mask.any(0)) def test_vecvec(self): result = self.mb @ self.mb assert result.shape == () assert result.mask assert result.unmasked == self.b @ self.b mb_no_mask = Masked(self.b, False) result2 = mb_no_mask @ mb_no_mask assert not result2.mask class TestMaskedArrayOperators(MaskedOperatorTests): # Some further tests that use strings, which are not useful for Quantity. @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality_strings(self, op): m1 = Masked(np.array(['a', 'b', 'c']), mask=[True, False, False]) m2 = Masked(np.array(['a', 'b', 'd']), mask=[False, False, False]) result = op(m1, m2) assert_array_equal(result.unmasked, op(m1.unmasked, m2.unmasked)) assert_array_equal(result.mask, m1.mask | m2.mask) result2 = op(m1, m2.unmasked) assert_masked_equal(result2, result) def test_not_implemented(self): with pytest.raises(TypeError): Masked(['a', 'b']) > object() class TestMaskedQuantityOperators(MaskedOperatorTests, QuantitySetup): pass class TestMaskedLongitudeOperators(MaskedOperatorTests, LongitudeSetup): pass class TestMaskedArrayMethods(MaskedArraySetup): def test_round(self): # Goes via ufunc, hence easy. 
mrc = self.mc.round() expected = Masked(self.c.round(), self.mask_c) assert_masked_equal(mrc, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_sum(self, axis): ma_sum = self.ma.sum(axis) expected_data = self.a.sum(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumsum(self, axis): ma_sum = self.ma.cumsum(axis) expected_data = self.a.cumsum(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_mean(self, axis): ma_mean = self.ma.mean(axis) filled = self.a.copy() filled[self.mask_a] = 0. count = 1 - self.ma.mask.astype(int) expected_data = filled.sum(axis) / count.sum(axis) expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_mean.unmasked, expected_data) assert_array_equal(ma_mean.mask, expected_mask) def test_mean_int16(self): ma = self.ma.astype('i2') ma_mean = ma.mean() assert ma_mean.dtype == 'f8' expected = ma.astype('f8').mean() assert_masked_equal(ma_mean, expected) def test_mean_float16(self): ma = self.ma.astype('f2') ma_mean = ma.mean() assert ma_mean.dtype == 'f2' expected = self.ma.mean().astype('f2') assert_masked_equal(ma_mean, expected) def test_mean_inplace(self): expected = self.ma.mean(1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.mean(1, out=out) assert result is out assert_masked_equal(out, expected) @pytest.mark.filterwarnings("ignore:.*true_divide.*") @pytest.mark.parametrize('axis', (0, 1, None)) def test_var(self, axis): ma_var = self.ma.var(axis) filled = (self.a - self.ma.mean(axis, keepdims=True))**2 filled[self.mask_a] = 0. 
count = (1 - self.ma.mask.astype(int)).sum(axis) expected_data = filled.sum(axis) / count expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_var.unmasked, expected_data) assert_array_equal(ma_var.mask, expected_mask) ma_var1 = self.ma.var(axis, ddof=1) expected_data1 = filled.sum(axis) / (count - 1) expected_mask1 = self.ma.mask.all(axis) | (count <= 1) assert_array_equal(ma_var1.unmasked, expected_data1) assert_array_equal(ma_var1.mask, expected_mask1) ma_var5 = self.ma.var(axis, ddof=5) assert np.all(~np.isfinite(ma_var5.unmasked)) assert ma_var5.mask.all() def test_var_int16(self): ma = self.ma.astype('i2') ma_var = ma.var() assert ma_var.dtype == 'f8' expected = ma.astype('f8').var() assert_masked_equal(ma_var, expected) def test_std(self): ma_std = self.ma.std(1, ddof=1) ma_var1 = self.ma.var(1, ddof=1) expected = np.sqrt(ma_var1) assert_masked_equal(ma_std, expected) def test_std_inplace(self): expected = self.ma.std(1, ddof=1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.std(1, ddof=1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_min(self, axis): ma_min = self.ma.min(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.min(axis) assert_array_equal(ma_min.unmasked, expected_data) assert not np.any(ma_min.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_max(self, axis): ma_max = self.ma.max(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.max(axis) assert_array_equal(ma_max.unmasked, expected_data) assert not np.any(ma_max.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmin(self, axis): ma_argmin = self.ma.argmin(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.argmin(axis) assert_array_equal(ma_argmin, expected_data) def test_argmin_only_one_unmasked_element(self): # Regression test for example from @taldcroft at # https://github.com/astropy/astropy/pull/11127#discussion_r600864559 ma = Masked(data=[1, 2], mask=[True, False]) assert ma.argmin() == 1 @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmax(self, axis): ma_argmax = self.ma.argmax(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.argmax(axis) assert_array_equal(ma_argmax, expected_data) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argsort(self, axis): ma_argsort = self.ma.argsort(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() * 1.1 expected_data = filled.argsort(axis) assert_array_equal(ma_argsort, expected_data) @pytest.mark.parametrize('order', [None, 'a', ('a', 'b'), ('b', 'a')]) @pytest.mark.parametrize('axis', [0, 1]) def test_structured_argsort(self, axis, order): ma_argsort = self.msa.argsort(axis, order=order) filled = self.msa.filled(fill_value=np.array((np.inf, np.inf), dtype=self.sdt)) expected_data = filled.argsort(axis, order=order) assert_array_equal(ma_argsort, expected_data) def test_argsort_error(self): with pytest.raises(ValueError, match='when the array has no fields'): self.ma.argsort(axis=0, order='a') @pytest.mark.parametrize('axis', (0, 1)) def test_sort(self, axis): ma_sort = self.ma.copy() ma_sort.sort(axis) indices = self.ma.argsort(axis) expected_data = np.take_along_axis(self.ma.unmasked, indices, axis) expected_mask = np.take_along_axis(self.ma.mask, indices, axis) assert_array_equal(ma_sort.unmasked, expected_data) assert_array_equal(ma_sort.mask, expected_mask) 
    @pytest.mark.parametrize('kth', [1, 3])
    def test_argpartition(self, kth):
        ma = self.ma.ravel()
        ma_argpartition = ma.argpartition(kth)
        partitioned = ma[ma_argpartition]
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    @pytest.mark.parametrize('kth', [1, 3])
    def test_partition(self, kth):
        partitioned = self.ma.flatten()
        partitioned.partition(kth)
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    def test_all_explicit(self):
        a1 = np.array([[1., 2.],
                       [3., 4.]])
        a2 = np.array([[1., 0.],
                       [3., 4.]])
        if self._data_cls is not np.ndarray:
            a1 = self._data_cls(a1, self.a.unit)
            a2 = self._data_cls(a2, self.a.unit)
        ma1 = Masked(a1, mask=[[False, False],
                               [True, True]])
        ma2 = Masked(a2, mask=[[False, True],
                               [False, True]])
        ma1_eq_ma2 = ma1 == ma2
        assert_array_equal(ma1_eq_ma2.unmasked, np.array([[True, False],
                                                          [True, True]]))
        assert_array_equal(ma1_eq_ma2.mask, np.array([[False, True],
                                                      [True, True]]))
        assert ma1_eq_ma2.all()
        assert not (ma1 != ma2).all()
        ma_eq1 = ma1_eq_ma2.all(1)
        assert_array_equal(ma_eq1.mask, np.array([False, True]))
        assert bool(ma_eq1[0]) is True
        assert bool(ma_eq1[1]) is False
        ma_eq0 = ma1_eq_ma2.all(0)
        assert_array_equal(ma_eq0.mask, np.array([False, True]))
        assert bool(ma_eq0[0]) is True
        assert bool(ma_eq0[1]) is False

    @pytest.mark.parametrize('method', ['any', 'all'])
    @pytest.mark.parametrize('array,axis', [
        ('a', 0), ('a', 1), ('a', None),
        ('b', None),
        ('c', 0), ('c', 1), ('c', None)])
    def test_all_and_any(self, array, axis, method):
        ma = getattr(self, 'm'+array)
        ma_eq = ma == ma
        ma_all_or_any = getattr(ma_eq, method)(axis=axis)
        filled = ma_eq.unmasked.copy()
        filled[ma_eq.mask] = method == 'all'
        a_all_or_any = getattr(filled, method)(axis=axis)
        all_masked = ma.mask.all(axis)
        assert_array_equal(ma_all_or_any.mask, all_masked)
        assert_array_equal(ma_all_or_any.unmasked, a_all_or_any)
        # interpretation as bool
        as_bool = [bool(a) for a in ma_all_or_any.ravel()]
        expected = [bool(a) for a in (a_all_or_any & ~all_masked).ravel()]
        assert as_bool == expected

    def test_any_inplace(self):
        ma_eq = self.ma == self.ma
        expected = ma_eq.any(1)
        out = Masked(np.zeros_like(expected.unmasked))
        result = ma_eq.any(1, out=out)
        assert result is out
        assert_masked_equal(result, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_diagonal(self, offset):
        mda = self.ma.diagonal(offset=offset)
        expected = Masked(self.a.diagonal(offset=offset),
                          self.mask_a.diagonal(offset=offset))
        assert_masked_equal(mda, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_trace(self, offset):
        mta = self.ma.trace(offset=offset)
        expected = Masked(self.a.trace(offset=offset),
                          self.mask_a.trace(offset=offset, dtype=bool))
        assert_masked_equal(mta, expected)

    def test_clip(self):
        maclip = self.ma.clip(self.b, self.c)
        expected = Masked(self.a.clip(self.b, self.c), self.mask_a)
        assert_masked_equal(maclip, expected)

    def test_clip_masked_min_max(self):
        maclip = self.ma.clip(self.mb, self.mc)
        # Need to be careful with min, max because of Longitude, which wraps.
dmax = np.maximum(np.maximum(self.a, self.b), self.c).max() dmin = np.minimum(np.minimum(self.a, self.b), self.c).min() expected = Masked(self.a.clip(self.mb.filled(dmin), self.mc.filled(dmax)), mask=self.mask_a) assert_masked_equal(maclip, expected) class TestMaskedQuantityMethods(TestMaskedArrayMethods, QuantitySetup): pass class TestMaskedLongitudeMethods(TestMaskedArrayMethods, LongitudeSetup): pass class TestMaskedArrayProductMethods(MaskedArraySetup): # These cannot work on Quantity, so done separately @pytest.mark.parametrize('axis', (0, 1, None)) def test_prod(self, axis): ma_sum = self.ma.prod(axis) expected_data = self.a.prod(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumprod(self, axis): ma_sum = self.ma.cumprod(axis) expected_data = self.a.cumprod(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) def test_masked_str_explicit(): sa = np.array([(1., 2.), (3., 4.)], dtype='f8,f8') msa = Masked(sa, [(False, True), (False, False)]) assert str(msa) == "[(1., ——) (3., 4.)]" assert str(msa[0]) == "(1., ——)" assert str(msa[1]) == "(3., 4.)" with np.printoptions(precision=3, floatmode='fixed'): assert str(msa) == "[(1.000, ———) (3.000, 4.000)]" def test_masked_repr_explicit(): # Use explicit endianness to ensure tests pass on all architectures sa = np.array([(1., 2.), (3., 4.)], dtype='>f8,>f8') msa = Masked(sa, [(False, True), (False, False)]) assert repr(msa) == ("MaskedNDArray([(1., ——), (3., 4.)], " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[0]) == ("MaskedNDArray((1., ——), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[1]) == ("MaskedNDArray((3., 4.), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") def test_masked_repr_summary(): ma = Masked(np.arange(15.), mask=[True]+[False]*14) with np.printoptions(threshold=2): assert repr(ma) == ( "MaskedNDArray([———, 1., 2., ..., 12., 13., 14.])") def test_masked_repr_nodata(): assert repr(Masked([])) == "MaskedNDArray([], dtype=float64)" class TestMaskedArrayRepr(MaskedArraySetup): def test_array_str(self): # very blunt check they work at all. 
str(self.ma) str(self.mb) str(self.mc) str(self.msa) str(self.msb) def test_scalar_str(self): assert self.mb[0].shape == () str(self.mb[0]) assert self.msb[0].shape == () str(self.msb[0]) def test_array_repr(self): repr(self.ma) repr(self.mb) repr(self.mc) repr(self.msa) repr(self.msb) def test_scalar_repr(self): repr(self.mb[0]) repr(self.msb[0]) class TestMaskedQuantityRepr(TestMaskedArrayRepr, QuantitySetup): pass class TestMaskedRecarray(MaskedArraySetup): @classmethod def setup_class(self): super().setup_class() self.ra = self.sa.view(np.recarray) self.mra = Masked(self.ra, mask=self.mask_sa) def test_recarray_setup(self): assert isinstance(self.mra, Masked) assert isinstance(self.mra, np.recarray) assert np.all(self.mra.unmasked == self.ra) assert np.all(self.mra.mask == self.mask_sa) assert_array_equal(self.mra.view(np.ndarray), self.sa) assert isinstance(self.mra.a, Masked) assert_array_equal(self.mra.a.unmasked, self.sa['a']) assert_array_equal(self.mra.a.mask, self.mask_sa['a']) def test_recarray_setting(self): mra = self.mra.copy() mra.a = self.msa['b'] assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_getting(self, attr): mra_a = self.mra.field(attr) assert isinstance(mra_a, Masked) assert_array_equal(mra_a.unmasked, self.sa['a']) assert_array_equal(mra_a.mask, self.mask_sa['a']) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_setting(self, attr): mra = self.mra.copy() mra.field(attr, self.msa['b']) assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask)
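# A short usage sketch (not part of the test module above) of the recarray
# behaviour that TestMaskedRecarray exercises: attribute and field access on
# a Masked recarray return Masked arrays carrying the per-field mask.  The
# array contents here are illustrative only.
import numpy as np
from astropy.utils.masked import Masked

_ra = np.array([(1., 2.), (3., 4.)],
               dtype=[('a', 'f8'), ('b', 'f8')]).view(np.recarray)
_mra = Masked(_ra, mask=np.array([(True, False), (False, False)],
                                 dtype=[('a', '?'), ('b', '?')]))
print(_mra.a.unmasked)  # [1. 3.]
print(_mra.a.mask)      # [ True False] -- field access keeps its own mask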
pllim/astropy
astropy/utils/masked/tests/test_masked.py
astropy/wcs/wcsapi/low_level_api.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ Astronomical and physics constants for Astropy v4.0. See :mod:`astropy.constants` for a complete listing of constants defined in Astropy. """ import warnings from astropy.utils import find_current_module from . import utils as _utils from . import codata2018, iau2015 codata = codata2018 iaudata = iau2015 _utils._set_c(codata, iaudata, find_current_module()) # Overwrite the following for consistency. # https://github.com/astropy/astropy/issues/8920 with warnings.catch_warnings(): warnings.filterwarnings('ignore', 'Constant .*already has a definition') # Solar mass (derived from mass parameter and gravitational constant) M_sun = iau2015.IAU2015( 'M_sun', "Solar mass", iau2015.GM_sun.value / codata2018.G.value, 'kg', ((codata2018.G.uncertainty / codata2018.G.value) * (iau2015.GM_sun.value / codata2018.G.value)), f"IAU 2015 Resolution B 3 + {codata2018.G.reference}", system='si') # Jupiter mass (derived from mass parameter and gravitational constant) M_jup = iau2015.IAU2015( 'M_jup', "Jupiter mass", iau2015.GM_jup.value / codata2018.G.value, 'kg', ((codata2018.G.uncertainty / codata2018.G.value) * (iau2015.GM_jup.value / codata2018.G.value)), f"IAU 2015 Resolution B 3 + {codata2018.G.reference}", system='si') # Earth mass (derived from mass parameter and gravitational constant) M_earth = iau2015.IAU2015( 'M_earth', "Earth mass", iau2015.GM_earth.value / codata2018.G.value, 'kg', ((codata2018.G.uncertainty / codata2018.G.value) * (iau2015.GM_earth.value / codata2018.G.value)), f"IAU 2015 Resolution B 3 + {codata2018.G.reference}", system='si') # Clean up namespace del warnings del find_current_module del _utils
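# A minimal numeric sketch (not part of astropyconst40.py) of the derivation
# used above: each mass follows from M = GM / G, and since the uncertainty
# is dominated by G, it propagates as sigma_M = (sigma_G / G) * M.  The
# values below are the IAU 2015 / CODATA 2018 numbers these modules use;
# treat them as illustrative inputs.
GM_sun = 1.3271244e+20    # m3 / s2, IAU 2015 nominal solar mass parameter
G = 6.67430e-11           # m3 / (kg s2), CODATA 2018
G_uncertainty = 1.5e-15   # m3 / (kg s2), CODATA 2018

M_sun_value = GM_sun / G                               # ~1.9885e30 kg
M_sun_uncertainty = (G_uncertainty / G) * M_sun_value  # ~4.5e25 kg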
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Test masked class initialization, methods, and operators. Functions, including ufuncs, are tested in test_functions.py """ import operator import numpy as np from numpy.testing import assert_array_equal import pytest from astropy import units as u from astropy.units import Quantity from astropy.coordinates import Longitude from astropy.utils.masked import Masked, MaskedNDArray def assert_masked_equal(a, b): assert_array_equal(a.unmasked, b.unmasked) assert_array_equal(a.mask, b.mask) VARIOUS_ITEMS = [ (1, 1), slice(None, 1), (), 1] class ArraySetup: _data_cls = np.ndarray @classmethod def setup_class(self): self.a = np.arange(6.).reshape(2, 3) self.mask_a = np.array([[True, False, False], [False, True, False]]) self.b = np.array([-3., -2., -1.]) self.mask_b = np.array([False, True, False]) self.c = np.array([[0.25], [0.5]]) self.mask_c = np.array([[False], [True]]) self.sdt = np.dtype([('a', 'f8'), ('b', 'f8')]) self.mask_sdt = np.dtype([('a', '?'), ('b', '?')]) self.sa = np.array([[(1., 2.), (3., 4.)], [(11., 12.), (13., 14.)]], dtype=self.sdt) self.mask_sa = np.array([[(True, True), (False, False)], [(False, True), (True, False)]], dtype=self.mask_sdt) self.sb = np.array([(1., 2.), (-3., 4.)], dtype=self.sdt) self.mask_sb = np.array([(True, False), (False, False)], dtype=self.mask_sdt) class QuantitySetup(ArraySetup): _data_cls = Quantity @classmethod def setup_class(self): super().setup_class() self.a = Quantity(self.a, u.m) self.b = Quantity(self.b, u.cm) self.c = Quantity(self.c, u.km) self.sa = Quantity(self.sa, u.m, dtype=self.sdt) self.sb = Quantity(self.sb, u.cm, dtype=self.sdt) class LongitudeSetup(ArraySetup): _data_cls = Longitude @classmethod def setup_class(self): super().setup_class() self.a = Longitude(self.a, u.deg) self.b = Longitude(self.b, u.deg) self.c = Longitude(self.c, u.deg) # Note: Longitude does not work on structured arrays, so # leaving it as regular array (which just reruns some tests). class TestMaskedArrayInitialization(ArraySetup): def test_simple(self): ma = Masked(self.a, mask=self.mask_a) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.a)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.a) assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_structured(self): ma = Masked(self.sa, mask=self.mask_sa) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.sa)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.sa) assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) def test_masked_ndarray_init(): # Note: as a straight ndarray subclass, MaskedNDArray passes on # the arguments relevant for np.ndarray, not np.array. a_in = np.arange(3, dtype=int) m_in = np.array([True, False, False]) buff = a_in.tobytes() # Check we're doing things correctly using regular ndarray. a = np.ndarray(shape=(3,), dtype=int, buffer=buff) assert_array_equal(a, a_in) # Check with and without mask. 
ma = MaskedNDArray((3,), dtype=int, mask=m_in, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, m_in) ma = MaskedNDArray((3,), dtype=int, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, np.zeros(3, bool)) def test_cannot_initialize_with_masked(): with pytest.raises(ValueError, match='cannot handle np.ma.masked'): Masked(np.ma.masked) def test_cannot_just_use_anything_with_a_mask_attribute(): class my_array(np.ndarray): mask = True a = np.array([1., 2.]).view(my_array) with pytest.raises(AttributeError, match='unmasked'): Masked(a) class TestMaskedClassCreation: """Try creating a MaskedList and subclasses. By no means meant to be realistic, just to check that the basic machinery allows it. """ @classmethod def setup_class(self): self._base_classes_orig = Masked._base_classes.copy() self._masked_classes_orig = Masked._masked_classes.copy() class MaskedList(Masked, list, base_cls=list, data_cls=list): def __new__(cls, *args, mask=None, copy=False, **kwargs): self = super().__new__(cls) self._unmasked = self._data_cls(*args, **kwargs) self.mask = mask return self # Need to have shape for basics to work. @property def shape(self): return (len(self._unmasked),) self.MaskedList = MaskedList def teardown_class(self): Masked._base_classes = self._base_classes_orig Masked._masked_classes = self._masked_classes_orig def test_setup(self): assert issubclass(self.MaskedList, Masked) assert issubclass(self.MaskedList, list) assert Masked(list) is self.MaskedList def test_masked_list(self): ml = self.MaskedList(range(3), mask=[True, False, False]) assert ml.unmasked == [0, 1, 2] assert_array_equal(ml.mask, np.array([True, False, False])) ml01 = ml[:2] assert ml01.unmasked == [0, 1] assert_array_equal(ml01.mask, np.array([True, False])) def test_from_list(self): ml = Masked([1, 2, 3], mask=[True, False, False]) assert ml.unmasked == [1, 2, 3] assert_array_equal(ml.mask, np.array([True, False, False])) def test_masked_list_subclass(self): class MyList(list): pass ml = MyList(range(3)) mml = Masked(ml, mask=[False, True, False]) assert isinstance(mml, Masked) assert isinstance(mml, MyList) assert isinstance(mml.unmasked, MyList) assert mml.unmasked == [0, 1, 2] assert_array_equal(mml.mask, np.array([False, True, False])) assert Masked(MyList) is type(mml) class TestMaskedNDArraySubclassCreation: """Test that masked subclasses can be created directly and indirectly.""" @classmethod def setup_class(self): class MyArray(np.ndarray): def __new__(cls, *args, **kwargs): return np.asanyarray(*args, **kwargs).view(cls) self.MyArray = MyArray self.a = np.array([1., 2.]).view(self.MyArray) self.m = np.array([True, False], dtype=bool) def teardown_method(self, method): Masked._masked_classes.pop(self.MyArray, None) def test_direct_creation(self): assert self.MyArray not in Masked._masked_classes mcls = Masked(self.MyArray) assert issubclass(mcls, Masked) assert issubclass(mcls, self.MyArray) assert mcls.__name__ == 'MaskedMyArray' assert mcls.__doc__.startswith('Masked version of MyArray') mms = mcls(self.a, mask=self.m) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
mcls = Masked(self.MyArray) mms = mcls(self.a) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, np.zeros(mms.shape, bool)) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): mcls = Masked(self.MyArray) ma = masked_array(np.asarray(self.a), mask=self.m) mms = mcls(ma) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_indirect_creation(self): assert self.MyArray not in Masked._masked_classes mms = Masked(self.a, mask=self.m) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) assert self.MyArray in Masked._masked_classes assert Masked(self.MyArray) is type(mms) def test_can_initialize_with_masked_values(self): mcls = Masked(self.MyArray) mms = mcls(Masked(np.asarray(self.a), mask=self.m)) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_viewing(self): mms = Masked(self.a, mask=self.m) mms2 = mms.view() assert type(mms2) is mms.__class__ assert_masked_equal(mms2, mms) ma = mms.view(np.ndarray) assert type(ma) is MaskedNDArray assert_array_equal(ma.unmasked, self.a.view(np.ndarray)) assert_array_equal(ma.mask, self.m) class TestMaskedQuantityInitialization(TestMaskedArrayInitialization, QuantitySetup): def test_masked_quantity_class_init(self): # TODO: class definitions should be more easily accessible. mcls = Masked._masked_classes[self.a.__class__] # This is not a very careful test. mq = mcls([1., 2.], mask=[True, False], unit=u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.value.mask == [True, False]) assert np.all(mq.mask == [True, False]) def test_masked_quantity_getting(self): mcls = Masked._masked_classes[self.a.__class__] MQ = Masked(Quantity) assert MQ is mcls def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
MQ = Masked(Quantity) mq = MQ([1., 2.], u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.mask == [False, False]) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): MQ = Masked(Quantity) a = np.array([1., 2.]) m = np.array([True, False]) ma = masked_array(a, m) mq = MQ(ma) assert isinstance(mq, Masked) assert isinstance(mq, Quantity) assert_array_equal(mq.value.unmasked, a) assert_array_equal(mq.mask, m) class TestMaskSetting(ArraySetup): def test_whole_mask_setting_simple(self): ma = Masked(self.a) assert ma.mask.shape == ma.shape assert not ma.mask.any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask.all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array([[True] * 3, [False] * 3])) ma.mask = self.mask_a assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_whole_mask_setting_structured(self): ma = Masked(self.sa) assert ma.mask.shape == ma.shape assert not ma.mask['a'].any() and not ma.mask['b'].any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask['a'].all() and ma.mask['b'].all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array( [[(True, True)] * 2, [(False, False)] * 2], dtype=self.mask_sdt)) ma.mask = self.mask_sa assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_part_mask_setting(self, item): ma = Masked(self.a) ma.mask[item] = True expected = np.zeros(ma.shape, bool) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, bool)) # Mask propagation mask = np.zeros(self.a.shape, bool) ma = Masked(self.a, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_part_mask_setting_structured(self, item): ma = Masked(self.sa) ma.mask[item] = True expected = np.zeros(ma.shape, self.mask_sdt) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, self.mask_sdt)) # Mask propagation mask = np.zeros(self.sa.shape, self.mask_sdt) ma = Masked(self.sa, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) # Following are tests where we trust the initializer works. class MaskedArraySetup(ArraySetup): @classmethod def setup_class(self): super().setup_class() self.ma = Masked(self.a, mask=self.mask_a) self.mb = Masked(self.b, mask=self.mask_b) self.mc = Masked(self.c, mask=self.mask_c) self.msa = Masked(self.sa, mask=self.mask_sa) self.msb = Masked(self.sb, mask=self.mask_sb) class TestViewing(MaskedArraySetup): def test_viewing_as_new_type(self): ma2 = self.ma.view(type(self.ma)) assert_masked_equal(ma2, self.ma) ma3 = self.ma.view() assert_masked_equal(ma3, self.ma) def test_viewing_as_new_dtype(self): # Not very meaningful, but possible... 
ma2 = self.ma.view('c8') assert_array_equal(ma2.unmasked, self.a.view('c8')) assert_array_equal(ma2.mask, self.mask_a) @pytest.mark.parametrize('new_dtype', ['2f4', 'f8,f8,f8']) def test_viewing_as_new_dtype_not_implemented(self, new_dtype): # But cannot (yet) view in way that would need to create a new mask, # even though that view is possible for a regular array. check = self.a.view(new_dtype) with pytest.raises(NotImplementedError, match='different.*size'): self.ma.view(check.dtype) def test_viewing_as_something_impossible(self): with pytest.raises(TypeError): # Use intp to ensure have the same size as object, # otherwise we get a different error message Masked(np.array([1, 2], dtype=np.intp)).view(Masked) class TestMaskedArrayCopyFilled(MaskedArraySetup): def test_copy(self): ma_copy = self.ma.copy() assert type(ma_copy) is type(self.ma) assert_array_equal(ma_copy.unmasked, self.ma.unmasked) assert_array_equal(ma_copy.mask, self.ma.mask) assert not np.may_share_memory(ma_copy.unmasked, self.ma.unmasked) assert not np.may_share_memory(ma_copy.mask, self.ma.mask) @pytest.mark.parametrize('fill_value', (0, 1)) def test_filled(self, fill_value): fill_value = fill_value * getattr(self.a, 'unit', 1) expected = self.a.copy() expected[self.ma.mask] = fill_value result = self.ma.filled(fill_value) assert_array_equal(expected, result) def test_filled_no_fill_value(self): with pytest.raises(TypeError, match='missing 1 required'): self.ma.filled() @pytest.mark.parametrize('fill_value', [(0, 1), (-1, -1)]) def test_filled_structured(self, fill_value): fill_value = np.array(fill_value, dtype=self.sdt) if hasattr(self.sa, 'unit'): fill_value = fill_value << self.sa.unit expected = self.sa.copy() expected['a'][self.msa.mask['a']] = fill_value['a'] expected['b'][self.msa.mask['b']] = fill_value['b'] result = self.msa.filled(fill_value) assert_array_equal(expected, result) def test_flat(self): ma_copy = self.ma.copy() ma_flat = ma_copy.flat # Check that single item keeps class and mask ma_flat1 = ma_flat[1] assert ma_flat1.unmasked == self.a.flat[1] assert ma_flat1.mask == self.mask_a.flat[1] # As well as getting items via iteration. assert all((ma.unmasked == a and ma.mask == m) for (ma, a, m) in zip(self.ma.flat, self.a.flat, self.mask_a.flat)) # check that flat works like a view of the real array ma_flat[1] = self.b[1] assert ma_flat[1] == self.b[1] assert ma_copy[0, 1] == self.b[1] class TestMaskedQuantityCopyFilled(TestMaskedArrayCopyFilled, QuantitySetup): pass class TestMaskedLongitudeCopyFilled(TestMaskedArrayCopyFilled, LongitudeSetup): pass class TestMaskedArrayShaping(MaskedArraySetup): def test_reshape(self): ma_reshape = self.ma.reshape((6,)) expected_data = self.a.reshape((6,)) expected_mask = self.mask_a.reshape((6,)) assert ma_reshape.shape == expected_data.shape assert_array_equal(ma_reshape.unmasked, expected_data) assert_array_equal(ma_reshape.mask, expected_mask) def test_shape_setting(self): ma_reshape = self.ma.copy() ma_reshape.shape = 6, expected_data = self.a.reshape((6,)) expected_mask = self.mask_a.reshape((6,)) assert ma_reshape.shape == expected_data.shape assert_array_equal(ma_reshape.unmasked, expected_data) assert_array_equal(ma_reshape.mask, expected_mask) def test_shape_setting_failure(self): ma = self.ma.copy() with pytest.raises(ValueError, match='cannot reshape'): ma.shape = 5, assert ma.shape == self.ma.shape assert ma.mask.shape == self.ma.shape # Here, mask can be reshaped but array cannot. 
ma2 = Masked(np.broadcast_to([[1.], [2.]], self.a.shape), mask=self.mask_a) with pytest.raises(AttributeError, match='ncompatible shape'): ma2.shape = 6, assert ma2.shape == self.ma.shape assert ma2.mask.shape == self.ma.shape # Here, array can be reshaped but mask cannot. ma3 = Masked(self.a.copy(), mask=np.broadcast_to([[True], [False]], self.mask_a.shape)) with pytest.raises(AttributeError, match='ncompatible shape'): ma3.shape = 6, assert ma3.shape == self.ma.shape assert ma3.mask.shape == self.ma.shape def test_ravel(self): ma_ravel = self.ma.ravel() expected_data = self.a.ravel() expected_mask = self.mask_a.ravel() assert ma_ravel.shape == expected_data.shape assert_array_equal(ma_ravel.unmasked, expected_data) assert_array_equal(ma_ravel.mask, expected_mask) def test_transpose(self): ma_transpose = self.ma.transpose() expected_data = self.a.transpose() expected_mask = self.mask_a.transpose() assert ma_transpose.shape == expected_data.shape assert_array_equal(ma_transpose.unmasked, expected_data) assert_array_equal(ma_transpose.mask, expected_mask) def test_iter(self): for ma, d, m in zip(self.ma, self.a, self.mask_a): assert_array_equal(ma.unmasked, d) assert_array_equal(ma.mask, m) class MaskedItemTests(MaskedArraySetup): @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_getitem(self, item): ma_part = self.ma[item] expected_data = self.a[item] expected_mask = self.mask_a[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_getitem_structured(self, item): ma_part = self.msa[item] expected_data = self.sa[item] expected_mask = self.mask_sa[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('indices,axis', [ ([0, 1], 1), ([0, 1], 0), ([0, 1], None), ([[0, 1], [2, 3]], None)]) def test_take(self, indices, axis): ma_take = self.ma.take(indices, axis=axis) expected_data = self.a.take(indices, axis=axis) expected_mask = self.mask_a.take(indices, axis=axis) assert_array_equal(ma_take.unmasked, expected_data) assert_array_equal(ma_take.mask, expected_mask) ma_take2 = np.take(self.ma, indices, axis=axis) assert_masked_equal(ma_take2, ma_take) @pytest.mark.parametrize('item', VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem(self, item, mask): base = self.ma.copy() expected_data = self.a.copy() expected_mask = self.mask_a.copy() value = self.a[0, 0] if mask is None else Masked(self.a[0, 0], mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem_structured(self, item, mask): base = self.msa.copy() expected_data = self.sa.copy() expected_mask = self.mask_sa.copy() value = self.sa['b'] if item == 'a' else self.sa[0, 0] if mask is not None: value = Masked(value, mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_setitem_np_ma_masked(self, item): base = self.ma.copy() expected_mask = self.mask_a.copy() 
base[item] = np.ma.masked expected_mask[item] = True assert_array_equal(base.unmasked, self.a) assert_array_equal(base.mask, expected_mask) class TestMaskedArrayItems(MaskedItemTests): @classmethod def setup_class(self): super().setup_class() self.d = np.array(['aa', 'bb']) self.mask_d = np.array([True, False]) self.md = Masked(self.d, self.mask_d) # Quantity, Longitude cannot hold strings. def test_getitem_strings(self): md = self.md.copy() md0 = md[0] assert md0.unmasked == self.d[0] assert md0.mask md_all = md[:] assert_masked_equal(md_all, md) def test_setitem_strings_np_ma_masked(self): md = self.md.copy() md[1] = np.ma.masked assert_array_equal(md.unmasked, self.d) assert_array_equal(md.mask, np.ones(2, bool)) class TestMaskedQuantityItems(MaskedItemTests, QuantitySetup): pass class TestMaskedLongitudeItems(MaskedItemTests, LongitudeSetup): pass class MaskedOperatorTests(MaskedArraySetup): @pytest.mark.parametrize('op', (operator.add, operator.sub)) def test_add_subtract(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that, e.g., # Longitude decays into an Angle. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_not_implemented(self): with pytest.raises(TypeError): self.ma > 'abc' @pytest.mark.parametrize('different_names', [False, True]) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_structured_equality(self, op, different_names): msb = self.msb if different_names: msb = msb.astype([(f'different_{name}', dt) for name, dt in msb.dtype.fields.items()]) mapmb = op(self.msa, self.msb) # Expected is a bit tricky here: only unmasked fields count expected_data = np.ones(mapmb.shape, bool) expected_mask = np.ones(mapmb.shape, bool) for field in self.sdt.names: fa, mfa = self.sa[field], self.mask_sa[field] fb, mfb = self.sb[field], self.mask_sb[field] mfequal = mfa | mfb fequal = (fa == fb) | mfequal expected_data &= fequal expected_mask &= mfequal if op is operator.ne: expected_data = ~expected_data # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. 
assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_matmul(self): result = self.ma.T @ self.ma assert_array_equal(result.unmasked, self.a.T @ self.a) mask1 = np.any(self.mask_a, axis=0) expected_mask = np.logical_or.outer(mask1, mask1) assert_array_equal(result.mask, expected_mask) result2 = self.ma.T @ self.a assert_array_equal(result2.unmasked, self.a.T @ self.a) expected_mask2 = np.logical_or.outer(mask1, np.zeros(3, bool)) assert_array_equal(result2.mask, expected_mask2) result3 = self.a.T @ self.ma assert_array_equal(result3.unmasked, self.a.T @ self.a) expected_mask3 = np.logical_or.outer(np.zeros(3, bool), mask1) assert_array_equal(result3.mask, expected_mask3) def test_matvec(self): result = self.ma @ self.mb assert np.all(result.mask) assert_array_equal(result.unmasked, self.a @ self.b) # Just using the masked vector still has all elements masked. result2 = self.a @ self.mb assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.a @ self.b) new_ma = self.ma.copy() new_ma.mask[0, 0] = False result3 = new_ma @ self.b assert_array_equal(result3.unmasked, self.a @ self.b) assert_array_equal(result3.mask, new_ma.mask.any(-1)) def test_vecmat(self): result = self.mb @ self.ma.T assert np.all(result.mask) assert_array_equal(result.unmasked, self.b @ self.a.T) result2 = self.b @ self.ma.T assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.b @ self.a.T) new_ma = self.ma.T.copy() new_ma.mask[0, 0] = False result3 = self.b @ new_ma assert_array_equal(result3.unmasked, self.b @ self.a.T) assert_array_equal(result3.mask, new_ma.mask.any(0)) def test_vecvec(self): result = self.mb @ self.mb assert result.shape == () assert result.mask assert result.unmasked == self.b @ self.b mb_no_mask = Masked(self.b, False) result2 = mb_no_mask @ mb_no_mask assert not result2.mask class TestMaskedArrayOperators(MaskedOperatorTests): # Some further tests that use strings, which are not useful for Quantity. @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality_strings(self, op): m1 = Masked(np.array(['a', 'b', 'c']), mask=[True, False, False]) m2 = Masked(np.array(['a', 'b', 'd']), mask=[False, False, False]) result = op(m1, m2) assert_array_equal(result.unmasked, op(m1.unmasked, m2.unmasked)) assert_array_equal(result.mask, m1.mask | m2.mask) result2 = op(m1, m2.unmasked) assert_masked_equal(result2, result) def test_not_implemented(self): with pytest.raises(TypeError): Masked(['a', 'b']) > object() class TestMaskedQuantityOperators(MaskedOperatorTests, QuantitySetup): pass class TestMaskedLongitudeOperators(MaskedOperatorTests, LongitudeSetup): pass class TestMaskedArrayMethods(MaskedArraySetup): def test_round(self): # Goes via ufunc, hence easy. 
mrc = self.mc.round() expected = Masked(self.c.round(), self.mask_c) assert_masked_equal(mrc, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_sum(self, axis): ma_sum = self.ma.sum(axis) expected_data = self.a.sum(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumsum(self, axis): ma_sum = self.ma.cumsum(axis) expected_data = self.a.cumsum(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_mean(self, axis): ma_mean = self.ma.mean(axis) filled = self.a.copy() filled[self.mask_a] = 0. count = 1 - self.ma.mask.astype(int) expected_data = filled.sum(axis) / count.sum(axis) expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_mean.unmasked, expected_data) assert_array_equal(ma_mean.mask, expected_mask) def test_mean_int16(self): ma = self.ma.astype('i2') ma_mean = ma.mean() assert ma_mean.dtype == 'f8' expected = ma.astype('f8').mean() assert_masked_equal(ma_mean, expected) def test_mean_float16(self): ma = self.ma.astype('f2') ma_mean = ma.mean() assert ma_mean.dtype == 'f2' expected = self.ma.mean().astype('f2') assert_masked_equal(ma_mean, expected) def test_mean_inplace(self): expected = self.ma.mean(1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.mean(1, out=out) assert result is out assert_masked_equal(out, expected) @pytest.mark.filterwarnings("ignore:.*true_divide.*") @pytest.mark.parametrize('axis', (0, 1, None)) def test_var(self, axis): ma_var = self.ma.var(axis) filled = (self.a - self.ma.mean(axis, keepdims=True))**2 filled[self.mask_a] = 0. 
count = (1 - self.ma.mask.astype(int)).sum(axis) expected_data = filled.sum(axis) / count expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_var.unmasked, expected_data) assert_array_equal(ma_var.mask, expected_mask) ma_var1 = self.ma.var(axis, ddof=1) expected_data1 = filled.sum(axis) / (count - 1) expected_mask1 = self.ma.mask.all(axis) | (count <= 1) assert_array_equal(ma_var1.unmasked, expected_data1) assert_array_equal(ma_var1.mask, expected_mask1) ma_var5 = self.ma.var(axis, ddof=5) assert np.all(~np.isfinite(ma_var5.unmasked)) assert ma_var5.mask.all() def test_var_int16(self): ma = self.ma.astype('i2') ma_var = ma.var() assert ma_var.dtype == 'f8' expected = ma.astype('f8').var() assert_masked_equal(ma_var, expected) def test_std(self): ma_std = self.ma.std(1, ddof=1) ma_var1 = self.ma.var(1, ddof=1) expected = np.sqrt(ma_var1) assert_masked_equal(ma_std, expected) def test_std_inplace(self): expected = self.ma.std(1, ddof=1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.std(1, ddof=1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_min(self, axis): ma_min = self.ma.min(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.min(axis) assert_array_equal(ma_min.unmasked, expected_data) assert not np.any(ma_min.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_max(self, axis): ma_max = self.ma.max(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.max(axis) assert_array_equal(ma_max.unmasked, expected_data) assert not np.any(ma_max.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmin(self, axis): ma_argmin = self.ma.argmin(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.argmin(axis) assert_array_equal(ma_argmin, expected_data) def test_argmin_only_one_unmasked_element(self): # Regression test for example from @taldcroft at # https://github.com/astropy/astropy/pull/11127#discussion_r600864559 ma = Masked(data=[1, 2], mask=[True, False]) assert ma.argmin() == 1 @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmax(self, axis): ma_argmax = self.ma.argmax(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.argmax(axis) assert_array_equal(ma_argmax, expected_data) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argsort(self, axis): ma_argsort = self.ma.argsort(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() * 1.1 expected_data = filled.argsort(axis) assert_array_equal(ma_argsort, expected_data) @pytest.mark.parametrize('order', [None, 'a', ('a', 'b'), ('b', 'a')]) @pytest.mark.parametrize('axis', [0, 1]) def test_structured_argsort(self, axis, order): ma_argsort = self.msa.argsort(axis, order=order) filled = self.msa.filled(fill_value=np.array((np.inf, np.inf), dtype=self.sdt)) expected_data = filled.argsort(axis, order=order) assert_array_equal(ma_argsort, expected_data) def test_argsort_error(self): with pytest.raises(ValueError, match='when the array has no fields'): self.ma.argsort(axis=0, order='a') @pytest.mark.parametrize('axis', (0, 1)) def test_sort(self, axis): ma_sort = self.ma.copy() ma_sort.sort(axis) indices = self.ma.argsort(axis) expected_data = np.take_along_axis(self.ma.unmasked, indices, axis) expected_mask = np.take_along_axis(self.ma.mask, indices, axis) assert_array_equal(ma_sort.unmasked, expected_data) assert_array_equal(ma_sort.mask, expected_mask) 
    @pytest.mark.parametrize('kth', [1, 3])
    def test_argpartition(self, kth):
        ma = self.ma.ravel()
        ma_argpartition = ma.argpartition(kth)
        partitioned = ma[ma_argpartition]
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    @pytest.mark.parametrize('kth', [1, 3])
    def test_partition(self, kth):
        partitioned = self.ma.flatten()
        partitioned.partition(kth)
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    def test_all_explicit(self):
        a1 = np.array([[1., 2.],
                       [3., 4.]])
        a2 = np.array([[1., 0.],
                       [3., 4.]])
        if self._data_cls is not np.ndarray:
            a1 = self._data_cls(a1, self.a.unit)
            a2 = self._data_cls(a2, self.a.unit)
        ma1 = Masked(a1, mask=[[False, False],
                               [True, True]])
        ma2 = Masked(a2, mask=[[False, True],
                               [False, True]])
        ma1_eq_ma2 = ma1 == ma2
        assert_array_equal(ma1_eq_ma2.unmasked, np.array([[True, False],
                                                          [True, True]]))
        assert_array_equal(ma1_eq_ma2.mask, np.array([[False, True],
                                                      [True, True]]))
        assert ma1_eq_ma2.all()
        assert not (ma1 != ma2).all()
        ma_eq1 = ma1_eq_ma2.all(1)
        assert_array_equal(ma_eq1.mask, np.array([False, True]))
        assert bool(ma_eq1[0]) is True
        assert bool(ma_eq1[1]) is False
        ma_eq0 = ma1_eq_ma2.all(0)
        assert_array_equal(ma_eq0.mask, np.array([False, True]))
        assert bool(ma_eq0[0]) is True
        assert bool(ma_eq0[1]) is False

    @pytest.mark.parametrize('method', ['any', 'all'])
    @pytest.mark.parametrize('array,axis', [
        ('a', 0), ('a', 1), ('a', None),
        ('b', None),
        ('c', 0), ('c', 1), ('c', None)])
    def test_all_and_any(self, array, axis, method):
        ma = getattr(self, 'm'+array)
        ma_eq = ma == ma
        ma_all_or_any = getattr(ma_eq, method)(axis=axis)
        filled = ma_eq.unmasked.copy()
        filled[ma_eq.mask] = method == 'all'
        a_all_or_any = getattr(filled, method)(axis=axis)
        all_masked = ma.mask.all(axis)
        assert_array_equal(ma_all_or_any.mask, all_masked)
        assert_array_equal(ma_all_or_any.unmasked, a_all_or_any)
        # interpretation as bool
        as_bool = [bool(a) for a in ma_all_or_any.ravel()]
        expected = [bool(a) for a in (a_all_or_any & ~all_masked).ravel()]
        assert as_bool == expected

    def test_any_inplace(self):
        ma_eq = self.ma == self.ma
        expected = ma_eq.any(1)
        out = Masked(np.zeros_like(expected.unmasked))
        result = ma_eq.any(1, out=out)
        assert result is out
        assert_masked_equal(result, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_diagonal(self, offset):
        mda = self.ma.diagonal(offset=offset)
        expected = Masked(self.a.diagonal(offset=offset),
                          self.mask_a.diagonal(offset=offset))
        assert_masked_equal(mda, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_trace(self, offset):
        mta = self.ma.trace(offset=offset)
        expected = Masked(self.a.trace(offset=offset),
                          self.mask_a.trace(offset=offset, dtype=bool))
        assert_masked_equal(mta, expected)

    def test_clip(self):
        maclip = self.ma.clip(self.b, self.c)
        expected = Masked(self.a.clip(self.b, self.c), self.mask_a)
        assert_masked_equal(maclip, expected)

    def test_clip_masked_min_max(self):
        maclip = self.ma.clip(self.mb, self.mc)
        # Need to be careful with min, max because of Longitude, which wraps.
dmax = np.maximum(np.maximum(self.a, self.b), self.c).max() dmin = np.minimum(np.minimum(self.a, self.b), self.c).min() expected = Masked(self.a.clip(self.mb.filled(dmin), self.mc.filled(dmax)), mask=self.mask_a) assert_masked_equal(maclip, expected) class TestMaskedQuantityMethods(TestMaskedArrayMethods, QuantitySetup): pass class TestMaskedLongitudeMethods(TestMaskedArrayMethods, LongitudeSetup): pass class TestMaskedArrayProductMethods(MaskedArraySetup): # These cannot work on Quantity, so done separately @pytest.mark.parametrize('axis', (0, 1, None)) def test_prod(self, axis): ma_sum = self.ma.prod(axis) expected_data = self.a.prod(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumprod(self, axis): ma_sum = self.ma.cumprod(axis) expected_data = self.a.cumprod(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) def test_masked_str_explicit(): sa = np.array([(1., 2.), (3., 4.)], dtype='f8,f8') msa = Masked(sa, [(False, True), (False, False)]) assert str(msa) == "[(1., ——) (3., 4.)]" assert str(msa[0]) == "(1., ——)" assert str(msa[1]) == "(3., 4.)" with np.printoptions(precision=3, floatmode='fixed'): assert str(msa) == "[(1.000, ———) (3.000, 4.000)]" def test_masked_repr_explicit(): # Use explicit endianness to ensure tests pass on all architectures sa = np.array([(1., 2.), (3., 4.)], dtype='>f8,>f8') msa = Masked(sa, [(False, True), (False, False)]) assert repr(msa) == ("MaskedNDArray([(1., ——), (3., 4.)], " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[0]) == ("MaskedNDArray((1., ——), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[1]) == ("MaskedNDArray((3., 4.), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") def test_masked_repr_summary(): ma = Masked(np.arange(15.), mask=[True]+[False]*14) with np.printoptions(threshold=2): assert repr(ma) == ( "MaskedNDArray([———, 1., 2., ..., 12., 13., 14.])") def test_masked_repr_nodata(): assert repr(Masked([])) == "MaskedNDArray([], dtype=float64)" class TestMaskedArrayRepr(MaskedArraySetup): def test_array_str(self): # very blunt check they work at all. 
str(self.ma) str(self.mb) str(self.mc) str(self.msa) str(self.msb) def test_scalar_str(self): assert self.mb[0].shape == () str(self.mb[0]) assert self.msb[0].shape == () str(self.msb[0]) def test_array_repr(self): repr(self.ma) repr(self.mb) repr(self.mc) repr(self.msa) repr(self.msb) def test_scalar_repr(self): repr(self.mb[0]) repr(self.msb[0]) class TestMaskedQuantityRepr(TestMaskedArrayRepr, QuantitySetup): pass class TestMaskedRecarray(MaskedArraySetup): @classmethod def setup_class(self): super().setup_class() self.ra = self.sa.view(np.recarray) self.mra = Masked(self.ra, mask=self.mask_sa) def test_recarray_setup(self): assert isinstance(self.mra, Masked) assert isinstance(self.mra, np.recarray) assert np.all(self.mra.unmasked == self.ra) assert np.all(self.mra.mask == self.mask_sa) assert_array_equal(self.mra.view(np.ndarray), self.sa) assert isinstance(self.mra.a, Masked) assert_array_equal(self.mra.a.unmasked, self.sa['a']) assert_array_equal(self.mra.a.mask, self.mask_sa['a']) def test_recarray_setting(self): mra = self.mra.copy() mra.a = self.msa['b'] assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_getting(self, attr): mra_a = self.mra.field(attr) assert isinstance(mra_a, Masked) assert_array_equal(mra_a.unmasked, self.sa['a']) assert_array_equal(mra_a.mask, self.mask_sa['a']) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_setting(self, attr): mra = self.mra.copy() mra.field(attr, self.msa['b']) assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask)
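# A small illustration (not part of the test module above) of the reduction
# semantics checked in TestMaskedArrayMethods: masked entries are ignored in
# mean(), and a reduced element is masked only if *all* contributing elements
# were masked.  The data mirror the ArraySetup fixtures.
import numpy as np
from astropy.utils.masked import Masked

_ma = Masked(np.arange(6.).reshape(2, 3),
             mask=[[True, False, False], [False, True, False]])
print(_ma.mean(axis=1).unmasked)  # [1.5 4. ] -- means over unmasked entries
print(_ma.mean(axis=1).mask)      # [False False] -- no row is fully masked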
pllim/astropy
astropy/utils/masked/tests/test_masked.py
astropy/constants/astropyconst40.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""The ShapedLikeNDArray mixin class and shape-related functions."""

import abc
from itertools import zip_longest

import numpy as np

__all__ = ['NDArrayShapeMethods', 'ShapedLikeNDArray',
           'check_broadcast', 'IncompatibleShapeError', 'unbroadcast']


class NDArrayShapeMethods:
    """Mixin class to provide shape-changing methods.

    The class proper is assumed to have some underlying data, which are arrays
    or array-like structures. It must define a ``shape`` property, which gives
    the shape of those data, as well as an ``_apply`` method that creates a
    new instance in which a `~numpy.ndarray` method has been applied to those.

    Furthermore, for consistency with `~numpy.ndarray`, it is recommended to
    define a setter for the ``shape`` property, which, like the
    `~numpy.ndarray.shape` property, allows in-place reshaping of the internal
    data (and which, unlike the ``reshape`` method, raises an exception if
    this is not possible).

    This class only provides the shape-changing methods and is meant in
    particular for `~numpy.ndarray` subclasses that need to keep track of
    other arrays.  For other classes, `~astropy.utils.shapes.ShapedLikeNDArray`
    is recommended.
    """
    # Note to developers: if new methods are added here, be sure to check that
    # they work properly with the classes that use this, such as Time and
    # BaseRepresentation, i.e., look at their ``_apply`` methods and add
    # relevant tests.  This is particularly important for methods that imply
    # copies rather than views of data (see the special-case treatment of
    # 'flatten' in Time).

    def __getitem__(self, item):
        return self._apply('__getitem__', item)

    def copy(self, *args, **kwargs):
        """Return an instance containing copies of the internal data.

        Parameters are as for :meth:`~numpy.ndarray.copy`.
        """
        return self._apply('copy', *args, **kwargs)

    def reshape(self, *args, **kwargs):
        """Return an instance containing the same data with a new shape.

        Parameters are as for :meth:`~numpy.ndarray.reshape`.  Note that it is
        not always possible to change the shape of an array without copying
        the data (see :func:`~numpy.reshape` documentation).  If you want an
        error to be raised if the data is copied, you should assign the new
        shape to the shape attribute (note: this may not be implemented for
        all classes using ``NDArrayShapeMethods``).
        """
        return self._apply('reshape', *args, **kwargs)

    def ravel(self, *args, **kwargs):
        """Return an instance with the array collapsed into one dimension.

        Parameters are as for :meth:`~numpy.ndarray.ravel`.  Note that it is
        not always possible to unravel an array without copying the data.
        If you want an error to be raised if the data is copied, you should
        assign shape ``(-1,)`` to the shape attribute.
        """
        return self._apply('ravel', *args, **kwargs)

    def flatten(self, *args, **kwargs):
        """Return a copy with the array collapsed into one dimension.

        Parameters are as for :meth:`~numpy.ndarray.flatten`.
        """
        return self._apply('flatten', *args, **kwargs)

    def transpose(self, *args, **kwargs):
        """Return an instance with the data transposed.

        Parameters are as for :meth:`~numpy.ndarray.transpose`.  All internal
        data are views of the data of the original.
        """
        return self._apply('transpose', *args, **kwargs)

    @property
    def T(self):
        """Return an instance with the data transposed.

        Parameters are as for :attr:`~numpy.ndarray.T`.  All internal
        data are views of the data of the original.
        """
        if self.ndim < 2:
            return self
        else:
            return self.transpose()

    def swapaxes(self, *args, **kwargs):
        """Return an instance with the given axes interchanged.

        Parameters are as for :meth:`~numpy.ndarray.swapaxes`:
        ``axis1, axis2``.  All internal data are views of the data of the
        original.
        """
        return self._apply('swapaxes', *args, **kwargs)

    def diagonal(self, *args, **kwargs):
        """Return an instance with the specified diagonals.

        Parameters are as for :meth:`~numpy.ndarray.diagonal`.  All internal
        data are views of the data of the original.
        """
        return self._apply('diagonal', *args, **kwargs)

    def squeeze(self, *args, **kwargs):
        """Return an instance with single-dimensional shape entries removed.

        Parameters are as for :meth:`~numpy.ndarray.squeeze`.  All internal
        data are views of the data of the original.
        """
        return self._apply('squeeze', *args, **kwargs)

    def take(self, indices, axis=None, out=None, mode='raise'):
        """Return a new instance formed from the elements at the given indices.

        Parameters are as for :meth:`~numpy.ndarray.take`, except that,
        obviously, no output array can be given.
        """
        if out is not None:
            raise NotImplementedError("cannot pass 'out' argument to 'take'.")

        return self._apply('take', indices, axis=axis, mode=mode)


class ShapedLikeNDArray(NDArrayShapeMethods, metaclass=abc.ABCMeta):
    """Mixin class to provide shape-changing methods.

    The class proper is assumed to have some underlying data, which are arrays
    or array-like structures. It must define a ``shape`` property, which gives
    the shape of those data, as well as an ``_apply`` method that creates a
    new instance in which a `~numpy.ndarray` method has been applied to those.

    Furthermore, for consistency with `~numpy.ndarray`, it is recommended to
    define a setter for the ``shape`` property, which, like the
    `~numpy.ndarray.shape` property, allows in-place reshaping of the internal
    data (and which, unlike the ``reshape`` method, raises an exception if
    this is not possible).

    This class also defines default implementations for ``ndim`` and ``size``
    properties, calculating those from the ``shape``.  These can be overridden
    by subclasses if there are faster ways to obtain those numbers.
    """
    # Note to developers: if new methods are added here, be sure to check that
    # they work properly with the classes that use this, such as Time and
    # BaseRepresentation, i.e., look at their ``_apply`` methods and add
    # relevant tests.  This is particularly important for methods that imply
    # copies rather than views of data (see the special-case treatment of
    # 'flatten' in Time).

    @property
    @abc.abstractmethod
    def shape(self):
        """The shape of the underlying data."""

    @abc.abstractmethod
    def _apply(self, method, *args, **kwargs):
        """Create a new instance, with ``method`` applied to underlying data.

        The method is any of the shape-changing methods for `~numpy.ndarray`
        (``reshape``, ``swapaxes``, etc.), as well as those picking particular
        elements (``__getitem__``, ``take``, etc.). It will be applied to the
        underlying arrays (e.g., ``jd1`` and ``jd2`` in `~astropy.time.Time`),
        with the results used to create a new instance.

        Parameters
        ----------
        method : str
            Method to be applied to the instance's internal data arrays.
        args : tuple
            Any positional arguments for ``method``.
        kwargs : dict
            Any keyword arguments for ``method``.
""" @property def ndim(self): """The number of dimensions of the instance and underlying arrays.""" return len(self.shape) @property def size(self): """The size of the object, as calculated from its shape.""" size = 1 for sh in self.shape: size *= sh return size @property def isscalar(self): return self.shape == () def __len__(self): if self.isscalar: raise TypeError("Scalar {!r} object has no len()" .format(self.__class__.__name__)) return self.shape[0] def __bool__(self): """Any instance should evaluate to True, except when it is empty.""" return self.size > 0 def __getitem__(self, item): try: return self._apply('__getitem__', item) except IndexError: if self.isscalar: raise TypeError('scalar {!r} object is not subscriptable.' .format(self.__class__.__name__)) else: raise def __iter__(self): if self.isscalar: raise TypeError('scalar {!r} object is not iterable.' .format(self.__class__.__name__)) # We cannot just write a generator here, since then the above error # would only be raised once we try to use the iterator, rather than # upon its definition using iter(self). def self_iter(): for idx in range(len(self)): yield self[idx] return self_iter() # Functions that change shape or essentially do indexing. _APPLICABLE_FUNCTIONS = { np.moveaxis, np.rollaxis, np.atleast_1d, np.atleast_2d, np.atleast_3d, np.expand_dims, np.broadcast_to, np.flip, np.fliplr, np.flipud, np.rot90, np.roll, np.delete, } # Functions that themselves defer to a method. Those are all # defined in np.core.fromnumeric, but exclude alen as well as # sort and partition, which make copies before calling the method. _METHOD_FUNCTIONS = {getattr(np, name): {'amax': 'max', 'amin': 'min', 'around': 'round', 'round_': 'round', 'alltrue': 'all', 'sometrue': 'any'}.get(name, name) for name in np.core.fromnumeric.__all__ if name not in ['alen', 'sort', 'partition']} # Add np.copy, which we may as well let defer to our method. _METHOD_FUNCTIONS[np.copy] = 'copy' # Could be made to work with a bit of effort: # np.where, np.compress, np.extract, # np.diag_indices_from, np.triu_indices_from, np.tril_indices_from # np.tile, np.repeat (need .repeat method) # TODO: create a proper implementation. # Furthermore, some arithmetic functions such as np.mean, np.median, # could work for Time, and many more for TimeDelta, so those should # override __array_function__. def __array_function__(self, function, types, args, kwargs): """Wrap numpy functions that make sense.""" if function in self._APPLICABLE_FUNCTIONS: if function is np.broadcast_to: # Ensure that any ndarray subclasses used are # properly propagated. kwargs.setdefault('subok', True) elif (function in {np.atleast_1d, np.atleast_2d, np.atleast_3d} and len(args) > 1): return tuple(function(arg, **kwargs) for arg in args) if self is not args[0]: return NotImplemented return self._apply(function, *args[1:], **kwargs) # For functions that defer to methods, use the corresponding # method/attribute if we have it. Otherwise, fall through. if self is args[0] and function in self._METHOD_FUNCTIONS: method = getattr(self, self._METHOD_FUNCTIONS[function], None) if method is not None: if callable(method): return method(*args[1:], **kwargs) else: # For np.shape, etc., just return the attribute. return method # Fall-back, just pass the arguments on since perhaps the function # works already (see above). 
return function.__wrapped__(*args, **kwargs) class IncompatibleShapeError(ValueError): def __init__(self, shape_a, shape_a_idx, shape_b, shape_b_idx): super().__init__(shape_a, shape_a_idx, shape_b, shape_b_idx) def check_broadcast(*shapes): """ Determines whether two or more Numpy arrays can be broadcast with each other based on their shape tuple alone. Parameters ---------- *shapes : tuple All shapes to include in the comparison. If only one shape is given it is passed through unmodified. If no shapes are given returns an empty `tuple`. Returns ------- broadcast : `tuple` If all shapes are mutually broadcastable, returns a tuple of the full broadcast shape. """ if len(shapes) == 0: return () elif len(shapes) == 1: return shapes[0] reversed_shapes = (reversed(shape) for shape in shapes) full_shape = [] for dims in zip_longest(*reversed_shapes, fillvalue=1): max_dim = 1 max_dim_idx = None for idx, dim in enumerate(dims): if dim == 1: continue if max_dim == 1: # The first dimension of size greater than 1 max_dim = dim max_dim_idx = idx elif dim != max_dim: raise IncompatibleShapeError( shapes[max_dim_idx], max_dim_idx, shapes[idx], idx) full_shape.append(max_dim) return tuple(full_shape[::-1]) def unbroadcast(array): """ Given an array, return a new array that is the smallest subset of the original array that can be re-broadcasted back to the original array. See https://stackoverflow.com/questions/40845769/un-broadcasting-numpy-arrays for more details. """ if array.ndim == 0: return array array = array[tuple((slice(0, 1) if stride == 0 else slice(None)) for stride in array.strides)] # Remove leading ones, which are not needed in numpy broadcasting. first_not_unity = next((i for (i, s) in enumerate(array.shape) if s > 1), array.ndim) return array.reshape(array.shape[first_not_unity:])
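

# Illustrative usage sketch (hypothetical, for demonstration only; the
# ``Pair`` class and the demo values below are not part of the original
# module).  A minimal ShapedLikeNDArray subclass wrapping two parallel
# arrays shows how a single ``_apply`` implementation powers the
# shape-changing methods and indexing defined above, and how
# check_broadcast/unbroadcast work from shapes and strides alone.
if __name__ == '__main__':
    class Pair(ShapedLikeNDArray):
        def __init__(self, x, y):
            self.x, self.y = np.asarray(x), np.asarray(y)

        @property
        def shape(self):
            return self.x.shape

        def _apply(self, method, *args, **kwargs):
            # ``method`` is a string for the shape methods; a callable is
            # passed in when coming via ``__array_function__``.
            if callable(method):
                return self.__class__(method(self.x, *args, **kwargs),
                                      method(self.y, *args, **kwargs))
            return self.__class__(getattr(self.x, method)(*args, **kwargs),
                                  getattr(self.y, method)(*args, **kwargs))

    p = Pair(np.arange(6.).reshape(2, 3), np.zeros((2, 3)))
    assert p.reshape(3, 2).shape == (3, 2)  # both arrays reshaped together
    assert p[0].shape == (3,)               # __getitem__ goes through _apply
    # check_broadcast computes the mutual broadcast shape from tuples alone;
    # unbroadcast recovers the minimal array that re-broadcasts to the input.
    assert check_broadcast((4, 3), (3,), (1, 3)) == (4, 3)
    assert unbroadcast(np.broadcast_to(np.arange(3.), (4, 3))).shape == (3,)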
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Test masked class initialization, methods, and operators. Functions, including ufuncs, are tested in test_functions.py """ import operator import numpy as np from numpy.testing import assert_array_equal import pytest from astropy import units as u from astropy.units import Quantity from astropy.coordinates import Longitude from astropy.utils.masked import Masked, MaskedNDArray def assert_masked_equal(a, b): assert_array_equal(a.unmasked, b.unmasked) assert_array_equal(a.mask, b.mask) VARIOUS_ITEMS = [ (1, 1), slice(None, 1), (), 1] class ArraySetup: _data_cls = np.ndarray @classmethod def setup_class(self): self.a = np.arange(6.).reshape(2, 3) self.mask_a = np.array([[True, False, False], [False, True, False]]) self.b = np.array([-3., -2., -1.]) self.mask_b = np.array([False, True, False]) self.c = np.array([[0.25], [0.5]]) self.mask_c = np.array([[False], [True]]) self.sdt = np.dtype([('a', 'f8'), ('b', 'f8')]) self.mask_sdt = np.dtype([('a', '?'), ('b', '?')]) self.sa = np.array([[(1., 2.), (3., 4.)], [(11., 12.), (13., 14.)]], dtype=self.sdt) self.mask_sa = np.array([[(True, True), (False, False)], [(False, True), (True, False)]], dtype=self.mask_sdt) self.sb = np.array([(1., 2.), (-3., 4.)], dtype=self.sdt) self.mask_sb = np.array([(True, False), (False, False)], dtype=self.mask_sdt) class QuantitySetup(ArraySetup): _data_cls = Quantity @classmethod def setup_class(self): super().setup_class() self.a = Quantity(self.a, u.m) self.b = Quantity(self.b, u.cm) self.c = Quantity(self.c, u.km) self.sa = Quantity(self.sa, u.m, dtype=self.sdt) self.sb = Quantity(self.sb, u.cm, dtype=self.sdt) class LongitudeSetup(ArraySetup): _data_cls = Longitude @classmethod def setup_class(self): super().setup_class() self.a = Longitude(self.a, u.deg) self.b = Longitude(self.b, u.deg) self.c = Longitude(self.c, u.deg) # Note: Longitude does not work on structured arrays, so # leaving it as regular array (which just reruns some tests). class TestMaskedArrayInitialization(ArraySetup): def test_simple(self): ma = Masked(self.a, mask=self.mask_a) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.a)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.a) assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_structured(self): ma = Masked(self.sa, mask=self.mask_sa) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.sa)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.sa) assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) def test_masked_ndarray_init(): # Note: as a straight ndarray subclass, MaskedNDArray passes on # the arguments relevant for np.ndarray, not np.array. a_in = np.arange(3, dtype=int) m_in = np.array([True, False, False]) buff = a_in.tobytes() # Check we're doing things correctly using regular ndarray. a = np.ndarray(shape=(3,), dtype=int, buffer=buff) assert_array_equal(a, a_in) # Check with and without mask. 
ma = MaskedNDArray((3,), dtype=int, mask=m_in, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, m_in) ma = MaskedNDArray((3,), dtype=int, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, np.zeros(3, bool)) def test_cannot_initialize_with_masked(): with pytest.raises(ValueError, match='cannot handle np.ma.masked'): Masked(np.ma.masked) def test_cannot_just_use_anything_with_a_mask_attribute(): class my_array(np.ndarray): mask = True a = np.array([1., 2.]).view(my_array) with pytest.raises(AttributeError, match='unmasked'): Masked(a) class TestMaskedClassCreation: """Try creating a MaskedList and subclasses. By no means meant to be realistic, just to check that the basic machinery allows it. """ @classmethod def setup_class(self): self._base_classes_orig = Masked._base_classes.copy() self._masked_classes_orig = Masked._masked_classes.copy() class MaskedList(Masked, list, base_cls=list, data_cls=list): def __new__(cls, *args, mask=None, copy=False, **kwargs): self = super().__new__(cls) self._unmasked = self._data_cls(*args, **kwargs) self.mask = mask return self # Need to have shape for basics to work. @property def shape(self): return (len(self._unmasked),) self.MaskedList = MaskedList def teardown_class(self): Masked._base_classes = self._base_classes_orig Masked._masked_classes = self._masked_classes_orig def test_setup(self): assert issubclass(self.MaskedList, Masked) assert issubclass(self.MaskedList, list) assert Masked(list) is self.MaskedList def test_masked_list(self): ml = self.MaskedList(range(3), mask=[True, False, False]) assert ml.unmasked == [0, 1, 2] assert_array_equal(ml.mask, np.array([True, False, False])) ml01 = ml[:2] assert ml01.unmasked == [0, 1] assert_array_equal(ml01.mask, np.array([True, False])) def test_from_list(self): ml = Masked([1, 2, 3], mask=[True, False, False]) assert ml.unmasked == [1, 2, 3] assert_array_equal(ml.mask, np.array([True, False, False])) def test_masked_list_subclass(self): class MyList(list): pass ml = MyList(range(3)) mml = Masked(ml, mask=[False, True, False]) assert isinstance(mml, Masked) assert isinstance(mml, MyList) assert isinstance(mml.unmasked, MyList) assert mml.unmasked == [0, 1, 2] assert_array_equal(mml.mask, np.array([False, True, False])) assert Masked(MyList) is type(mml) class TestMaskedNDArraySubclassCreation: """Test that masked subclasses can be created directly and indirectly.""" @classmethod def setup_class(self): class MyArray(np.ndarray): def __new__(cls, *args, **kwargs): return np.asanyarray(*args, **kwargs).view(cls) self.MyArray = MyArray self.a = np.array([1., 2.]).view(self.MyArray) self.m = np.array([True, False], dtype=bool) def teardown_method(self, method): Masked._masked_classes.pop(self.MyArray, None) def test_direct_creation(self): assert self.MyArray not in Masked._masked_classes mcls = Masked(self.MyArray) assert issubclass(mcls, Masked) assert issubclass(mcls, self.MyArray) assert mcls.__name__ == 'MaskedMyArray' assert mcls.__doc__.startswith('Masked version of MyArray') mms = mcls(self.a, mask=self.m) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
mcls = Masked(self.MyArray) mms = mcls(self.a) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, np.zeros(mms.shape, bool)) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): mcls = Masked(self.MyArray) ma = masked_array(np.asarray(self.a), mask=self.m) mms = mcls(ma) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_indirect_creation(self): assert self.MyArray not in Masked._masked_classes mms = Masked(self.a, mask=self.m) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) assert self.MyArray in Masked._masked_classes assert Masked(self.MyArray) is type(mms) def test_can_initialize_with_masked_values(self): mcls = Masked(self.MyArray) mms = mcls(Masked(np.asarray(self.a), mask=self.m)) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_viewing(self): mms = Masked(self.a, mask=self.m) mms2 = mms.view() assert type(mms2) is mms.__class__ assert_masked_equal(mms2, mms) ma = mms.view(np.ndarray) assert type(ma) is MaskedNDArray assert_array_equal(ma.unmasked, self.a.view(np.ndarray)) assert_array_equal(ma.mask, self.m) class TestMaskedQuantityInitialization(TestMaskedArrayInitialization, QuantitySetup): def test_masked_quantity_class_init(self): # TODO: class definitions should be more easily accessible. mcls = Masked._masked_classes[self.a.__class__] # This is not a very careful test. mq = mcls([1., 2.], mask=[True, False], unit=u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.value.mask == [True, False]) assert np.all(mq.mask == [True, False]) def test_masked_quantity_getting(self): mcls = Masked._masked_classes[self.a.__class__] MQ = Masked(Quantity) assert MQ is mcls def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
MQ = Masked(Quantity) mq = MQ([1., 2.], u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.mask == [False, False]) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): MQ = Masked(Quantity) a = np.array([1., 2.]) m = np.array([True, False]) ma = masked_array(a, m) mq = MQ(ma) assert isinstance(mq, Masked) assert isinstance(mq, Quantity) assert_array_equal(mq.value.unmasked, a) assert_array_equal(mq.mask, m) class TestMaskSetting(ArraySetup): def test_whole_mask_setting_simple(self): ma = Masked(self.a) assert ma.mask.shape == ma.shape assert not ma.mask.any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask.all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array([[True] * 3, [False] * 3])) ma.mask = self.mask_a assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_whole_mask_setting_structured(self): ma = Masked(self.sa) assert ma.mask.shape == ma.shape assert not ma.mask['a'].any() and not ma.mask['b'].any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask['a'].all() and ma.mask['b'].all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array( [[(True, True)] * 2, [(False, False)] * 2], dtype=self.mask_sdt)) ma.mask = self.mask_sa assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_part_mask_setting(self, item): ma = Masked(self.a) ma.mask[item] = True expected = np.zeros(ma.shape, bool) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, bool)) # Mask propagation mask = np.zeros(self.a.shape, bool) ma = Masked(self.a, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_part_mask_setting_structured(self, item): ma = Masked(self.sa) ma.mask[item] = True expected = np.zeros(ma.shape, self.mask_sdt) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, self.mask_sdt)) # Mask propagation mask = np.zeros(self.sa.shape, self.mask_sdt) ma = Masked(self.sa, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) # Following are tests where we trust the initializer works. class MaskedArraySetup(ArraySetup): @classmethod def setup_class(self): super().setup_class() self.ma = Masked(self.a, mask=self.mask_a) self.mb = Masked(self.b, mask=self.mask_b) self.mc = Masked(self.c, mask=self.mask_c) self.msa = Masked(self.sa, mask=self.mask_sa) self.msb = Masked(self.sb, mask=self.mask_sb) class TestViewing(MaskedArraySetup): def test_viewing_as_new_type(self): ma2 = self.ma.view(type(self.ma)) assert_masked_equal(ma2, self.ma) ma3 = self.ma.view() assert_masked_equal(ma3, self.ma) def test_viewing_as_new_dtype(self): # Not very meaningful, but possible... 
        ma2 = self.ma.view('c8')
        assert_array_equal(ma2.unmasked, self.a.view('c8'))
        assert_array_equal(ma2.mask, self.mask_a)

    @pytest.mark.parametrize('new_dtype', ['2f4', 'f8,f8,f8'])
    def test_viewing_as_new_dtype_not_implemented(self, new_dtype):
        # But cannot (yet) view in a way that would need to create a new
        # mask, even though that view is possible for a regular array.
        check = self.a.view(new_dtype)
        with pytest.raises(NotImplementedError, match='different.*size'):
            self.ma.view(check.dtype)

    def test_viewing_as_something_impossible(self):
        with pytest.raises(TypeError):
            # Use intp to ensure we have the same size as object,
            # otherwise we get a different error message.
            Masked(np.array([1, 2], dtype=np.intp)).view(Masked)


class TestMaskedArrayCopyFilled(MaskedArraySetup):
    def test_copy(self):
        ma_copy = self.ma.copy()
        assert type(ma_copy) is type(self.ma)
        assert_array_equal(ma_copy.unmasked, self.ma.unmasked)
        assert_array_equal(ma_copy.mask, self.ma.mask)
        assert not np.may_share_memory(ma_copy.unmasked, self.ma.unmasked)
        assert not np.may_share_memory(ma_copy.mask, self.ma.mask)

    @pytest.mark.parametrize('fill_value', (0, 1))
    def test_filled(self, fill_value):
        fill_value = fill_value * getattr(self.a, 'unit', 1)
        expected = self.a.copy()
        expected[self.ma.mask] = fill_value
        result = self.ma.filled(fill_value)
        assert_array_equal(expected, result)

    def test_filled_no_fill_value(self):
        with pytest.raises(TypeError, match='missing 1 required'):
            self.ma.filled()

    @pytest.mark.parametrize('fill_value', [(0, 1), (-1, -1)])
    def test_filled_structured(self, fill_value):
        fill_value = np.array(fill_value, dtype=self.sdt)
        if hasattr(self.sa, 'unit'):
            fill_value = fill_value << self.sa.unit
        expected = self.sa.copy()
        expected['a'][self.msa.mask['a']] = fill_value['a']
        expected['b'][self.msa.mask['b']] = fill_value['b']
        result = self.msa.filled(fill_value)
        assert_array_equal(expected, result)

    def test_flat(self):
        ma_copy = self.ma.copy()
        ma_flat = ma_copy.flat
        # Check that a single item keeps class and mask.
        ma_flat1 = ma_flat[1]
        assert ma_flat1.unmasked == self.a.flat[1]
        assert ma_flat1.mask == self.mask_a.flat[1]
        # As well as getting items via iteration.
        assert all((ma.unmasked == a and ma.mask == m) for (ma, a, m)
                   in zip(self.ma.flat, self.a.flat, self.mask_a.flat))

        # Check that flat works like a view of the real array.
        ma_flat[1] = self.b[1]
        assert ma_flat[1] == self.b[1]
        assert ma_copy[0, 1] == self.b[1]


class TestMaskedQuantityCopyFilled(TestMaskedArrayCopyFilled, QuantitySetup):
    pass


class TestMaskedLongitudeCopyFilled(TestMaskedArrayCopyFilled, LongitudeSetup):
    pass


class TestMaskedArrayShaping(MaskedArraySetup):
    def test_reshape(self):
        ma_reshape = self.ma.reshape((6,))
        expected_data = self.a.reshape((6,))
        expected_mask = self.mask_a.reshape((6,))
        assert ma_reshape.shape == expected_data.shape
        assert_array_equal(ma_reshape.unmasked, expected_data)
        assert_array_equal(ma_reshape.mask, expected_mask)

    def test_shape_setting(self):
        ma_reshape = self.ma.copy()
        ma_reshape.shape = 6,
        expected_data = self.a.reshape((6,))
        expected_mask = self.mask_a.reshape((6,))
        assert ma_reshape.shape == expected_data.shape
        assert_array_equal(ma_reshape.unmasked, expected_data)
        assert_array_equal(ma_reshape.mask, expected_mask)

    def test_shape_setting_failure(self):
        ma = self.ma.copy()
        with pytest.raises(ValueError, match='cannot reshape'):
            ma.shape = 5,

        assert ma.shape == self.ma.shape
        assert ma.mask.shape == self.ma.shape

        # Here, mask can be reshaped but array cannot.
ma2 = Masked(np.broadcast_to([[1.], [2.]], self.a.shape), mask=self.mask_a) with pytest.raises(AttributeError, match='ncompatible shape'): ma2.shape = 6, assert ma2.shape == self.ma.shape assert ma2.mask.shape == self.ma.shape # Here, array can be reshaped but mask cannot. ma3 = Masked(self.a.copy(), mask=np.broadcast_to([[True], [False]], self.mask_a.shape)) with pytest.raises(AttributeError, match='ncompatible shape'): ma3.shape = 6, assert ma3.shape == self.ma.shape assert ma3.mask.shape == self.ma.shape def test_ravel(self): ma_ravel = self.ma.ravel() expected_data = self.a.ravel() expected_mask = self.mask_a.ravel() assert ma_ravel.shape == expected_data.shape assert_array_equal(ma_ravel.unmasked, expected_data) assert_array_equal(ma_ravel.mask, expected_mask) def test_transpose(self): ma_transpose = self.ma.transpose() expected_data = self.a.transpose() expected_mask = self.mask_a.transpose() assert ma_transpose.shape == expected_data.shape assert_array_equal(ma_transpose.unmasked, expected_data) assert_array_equal(ma_transpose.mask, expected_mask) def test_iter(self): for ma, d, m in zip(self.ma, self.a, self.mask_a): assert_array_equal(ma.unmasked, d) assert_array_equal(ma.mask, m) class MaskedItemTests(MaskedArraySetup): @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_getitem(self, item): ma_part = self.ma[item] expected_data = self.a[item] expected_mask = self.mask_a[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_getitem_structured(self, item): ma_part = self.msa[item] expected_data = self.sa[item] expected_mask = self.mask_sa[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('indices,axis', [ ([0, 1], 1), ([0, 1], 0), ([0, 1], None), ([[0, 1], [2, 3]], None)]) def test_take(self, indices, axis): ma_take = self.ma.take(indices, axis=axis) expected_data = self.a.take(indices, axis=axis) expected_mask = self.mask_a.take(indices, axis=axis) assert_array_equal(ma_take.unmasked, expected_data) assert_array_equal(ma_take.mask, expected_mask) ma_take2 = np.take(self.ma, indices, axis=axis) assert_masked_equal(ma_take2, ma_take) @pytest.mark.parametrize('item', VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem(self, item, mask): base = self.ma.copy() expected_data = self.a.copy() expected_mask = self.mask_a.copy() value = self.a[0, 0] if mask is None else Masked(self.a[0, 0], mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem_structured(self, item, mask): base = self.msa.copy() expected_data = self.sa.copy() expected_mask = self.mask_sa.copy() value = self.sa['b'] if item == 'a' else self.sa[0, 0] if mask is not None: value = Masked(value, mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_setitem_np_ma_masked(self, item): base = self.ma.copy() expected_mask = self.mask_a.copy() 
base[item] = np.ma.masked expected_mask[item] = True assert_array_equal(base.unmasked, self.a) assert_array_equal(base.mask, expected_mask) class TestMaskedArrayItems(MaskedItemTests): @classmethod def setup_class(self): super().setup_class() self.d = np.array(['aa', 'bb']) self.mask_d = np.array([True, False]) self.md = Masked(self.d, self.mask_d) # Quantity, Longitude cannot hold strings. def test_getitem_strings(self): md = self.md.copy() md0 = md[0] assert md0.unmasked == self.d[0] assert md0.mask md_all = md[:] assert_masked_equal(md_all, md) def test_setitem_strings_np_ma_masked(self): md = self.md.copy() md[1] = np.ma.masked assert_array_equal(md.unmasked, self.d) assert_array_equal(md.mask, np.ones(2, bool)) class TestMaskedQuantityItems(MaskedItemTests, QuantitySetup): pass class TestMaskedLongitudeItems(MaskedItemTests, LongitudeSetup): pass class MaskedOperatorTests(MaskedArraySetup): @pytest.mark.parametrize('op', (operator.add, operator.sub)) def test_add_subtract(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that, e.g., # Longitude decays into an Angle. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_not_implemented(self): with pytest.raises(TypeError): self.ma > 'abc' @pytest.mark.parametrize('different_names', [False, True]) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_structured_equality(self, op, different_names): msb = self.msb if different_names: msb = msb.astype([(f'different_{name}', dt) for name, dt in msb.dtype.fields.items()]) mapmb = op(self.msa, self.msb) # Expected is a bit tricky here: only unmasked fields count expected_data = np.ones(mapmb.shape, bool) expected_mask = np.ones(mapmb.shape, bool) for field in self.sdt.names: fa, mfa = self.sa[field], self.mask_sa[field] fb, mfb = self.sb[field], self.mask_sb[field] mfequal = mfa | mfb fequal = (fa == fb) | mfequal expected_data &= fequal expected_mask &= mfequal if op is operator.ne: expected_data = ~expected_data # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. 
assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_matmul(self): result = self.ma.T @ self.ma assert_array_equal(result.unmasked, self.a.T @ self.a) mask1 = np.any(self.mask_a, axis=0) expected_mask = np.logical_or.outer(mask1, mask1) assert_array_equal(result.mask, expected_mask) result2 = self.ma.T @ self.a assert_array_equal(result2.unmasked, self.a.T @ self.a) expected_mask2 = np.logical_or.outer(mask1, np.zeros(3, bool)) assert_array_equal(result2.mask, expected_mask2) result3 = self.a.T @ self.ma assert_array_equal(result3.unmasked, self.a.T @ self.a) expected_mask3 = np.logical_or.outer(np.zeros(3, bool), mask1) assert_array_equal(result3.mask, expected_mask3) def test_matvec(self): result = self.ma @ self.mb assert np.all(result.mask) assert_array_equal(result.unmasked, self.a @ self.b) # Just using the masked vector still has all elements masked. result2 = self.a @ self.mb assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.a @ self.b) new_ma = self.ma.copy() new_ma.mask[0, 0] = False result3 = new_ma @ self.b assert_array_equal(result3.unmasked, self.a @ self.b) assert_array_equal(result3.mask, new_ma.mask.any(-1)) def test_vecmat(self): result = self.mb @ self.ma.T assert np.all(result.mask) assert_array_equal(result.unmasked, self.b @ self.a.T) result2 = self.b @ self.ma.T assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.b @ self.a.T) new_ma = self.ma.T.copy() new_ma.mask[0, 0] = False result3 = self.b @ new_ma assert_array_equal(result3.unmasked, self.b @ self.a.T) assert_array_equal(result3.mask, new_ma.mask.any(0)) def test_vecvec(self): result = self.mb @ self.mb assert result.shape == () assert result.mask assert result.unmasked == self.b @ self.b mb_no_mask = Masked(self.b, False) result2 = mb_no_mask @ mb_no_mask assert not result2.mask class TestMaskedArrayOperators(MaskedOperatorTests): # Some further tests that use strings, which are not useful for Quantity. @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality_strings(self, op): m1 = Masked(np.array(['a', 'b', 'c']), mask=[True, False, False]) m2 = Masked(np.array(['a', 'b', 'd']), mask=[False, False, False]) result = op(m1, m2) assert_array_equal(result.unmasked, op(m1.unmasked, m2.unmasked)) assert_array_equal(result.mask, m1.mask | m2.mask) result2 = op(m1, m2.unmasked) assert_masked_equal(result2, result) def test_not_implemented(self): with pytest.raises(TypeError): Masked(['a', 'b']) > object() class TestMaskedQuantityOperators(MaskedOperatorTests, QuantitySetup): pass class TestMaskedLongitudeOperators(MaskedOperatorTests, LongitudeSetup): pass class TestMaskedArrayMethods(MaskedArraySetup): def test_round(self): # Goes via ufunc, hence easy. 
mrc = self.mc.round() expected = Masked(self.c.round(), self.mask_c) assert_masked_equal(mrc, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_sum(self, axis): ma_sum = self.ma.sum(axis) expected_data = self.a.sum(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumsum(self, axis): ma_sum = self.ma.cumsum(axis) expected_data = self.a.cumsum(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_mean(self, axis): ma_mean = self.ma.mean(axis) filled = self.a.copy() filled[self.mask_a] = 0. count = 1 - self.ma.mask.astype(int) expected_data = filled.sum(axis) / count.sum(axis) expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_mean.unmasked, expected_data) assert_array_equal(ma_mean.mask, expected_mask) def test_mean_int16(self): ma = self.ma.astype('i2') ma_mean = ma.mean() assert ma_mean.dtype == 'f8' expected = ma.astype('f8').mean() assert_masked_equal(ma_mean, expected) def test_mean_float16(self): ma = self.ma.astype('f2') ma_mean = ma.mean() assert ma_mean.dtype == 'f2' expected = self.ma.mean().astype('f2') assert_masked_equal(ma_mean, expected) def test_mean_inplace(self): expected = self.ma.mean(1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.mean(1, out=out) assert result is out assert_masked_equal(out, expected) @pytest.mark.filterwarnings("ignore:.*true_divide.*") @pytest.mark.parametrize('axis', (0, 1, None)) def test_var(self, axis): ma_var = self.ma.var(axis) filled = (self.a - self.ma.mean(axis, keepdims=True))**2 filled[self.mask_a] = 0. 
count = (1 - self.ma.mask.astype(int)).sum(axis) expected_data = filled.sum(axis) / count expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_var.unmasked, expected_data) assert_array_equal(ma_var.mask, expected_mask) ma_var1 = self.ma.var(axis, ddof=1) expected_data1 = filled.sum(axis) / (count - 1) expected_mask1 = self.ma.mask.all(axis) | (count <= 1) assert_array_equal(ma_var1.unmasked, expected_data1) assert_array_equal(ma_var1.mask, expected_mask1) ma_var5 = self.ma.var(axis, ddof=5) assert np.all(~np.isfinite(ma_var5.unmasked)) assert ma_var5.mask.all() def test_var_int16(self): ma = self.ma.astype('i2') ma_var = ma.var() assert ma_var.dtype == 'f8' expected = ma.astype('f8').var() assert_masked_equal(ma_var, expected) def test_std(self): ma_std = self.ma.std(1, ddof=1) ma_var1 = self.ma.var(1, ddof=1) expected = np.sqrt(ma_var1) assert_masked_equal(ma_std, expected) def test_std_inplace(self): expected = self.ma.std(1, ddof=1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.std(1, ddof=1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_min(self, axis): ma_min = self.ma.min(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.min(axis) assert_array_equal(ma_min.unmasked, expected_data) assert not np.any(ma_min.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_max(self, axis): ma_max = self.ma.max(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.max(axis) assert_array_equal(ma_max.unmasked, expected_data) assert not np.any(ma_max.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmin(self, axis): ma_argmin = self.ma.argmin(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.argmin(axis) assert_array_equal(ma_argmin, expected_data) def test_argmin_only_one_unmasked_element(self): # Regression test for example from @taldcroft at # https://github.com/astropy/astropy/pull/11127#discussion_r600864559 ma = Masked(data=[1, 2], mask=[True, False]) assert ma.argmin() == 1 @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmax(self, axis): ma_argmax = self.ma.argmax(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.argmax(axis) assert_array_equal(ma_argmax, expected_data) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argsort(self, axis): ma_argsort = self.ma.argsort(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() * 1.1 expected_data = filled.argsort(axis) assert_array_equal(ma_argsort, expected_data) @pytest.mark.parametrize('order', [None, 'a', ('a', 'b'), ('b', 'a')]) @pytest.mark.parametrize('axis', [0, 1]) def test_structured_argsort(self, axis, order): ma_argsort = self.msa.argsort(axis, order=order) filled = self.msa.filled(fill_value=np.array((np.inf, np.inf), dtype=self.sdt)) expected_data = filled.argsort(axis, order=order) assert_array_equal(ma_argsort, expected_data) def test_argsort_error(self): with pytest.raises(ValueError, match='when the array has no fields'): self.ma.argsort(axis=0, order='a') @pytest.mark.parametrize('axis', (0, 1)) def test_sort(self, axis): ma_sort = self.ma.copy() ma_sort.sort(axis) indices = self.ma.argsort(axis) expected_data = np.take_along_axis(self.ma.unmasked, indices, axis) expected_mask = np.take_along_axis(self.ma.mask, indices, axis) assert_array_equal(ma_sort.unmasked, expected_data) assert_array_equal(ma_sort.mask, expected_mask) 
    @pytest.mark.parametrize('kth', [1, 3])
    def test_argpartition(self, kth):
        ma = self.ma.ravel()
        ma_argpartition = ma.argpartition(kth)
        partitioned = ma[ma_argpartition]
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    @pytest.mark.parametrize('kth', [1, 3])
    def test_partition(self, kth):
        partitioned = self.ma.flatten()
        partitioned.partition(kth)
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    def test_all_explicit(self):
        a1 = np.array([[1., 2.],
                       [3., 4.]])
        a2 = np.array([[1., 0.],
                       [3., 4.]])
        if self._data_cls is not np.ndarray:
            a1 = self._data_cls(a1, self.a.unit)
            a2 = self._data_cls(a2, self.a.unit)
        ma1 = Masked(a1, mask=[[False, False],
                               [True, True]])
        ma2 = Masked(a2, mask=[[False, True],
                               [False, True]])
        ma1_eq_ma2 = ma1 == ma2
        assert_array_equal(ma1_eq_ma2.unmasked, np.array([[True, False],
                                                          [True, True]]))
        assert_array_equal(ma1_eq_ma2.mask, np.array([[False, True],
                                                      [True, True]]))
        assert ma1_eq_ma2.all()
        assert not (ma1 != ma2).all()
        ma_eq1 = ma1_eq_ma2.all(1)
        assert_array_equal(ma_eq1.mask, np.array([False, True]))
        assert bool(ma_eq1[0]) is True
        assert bool(ma_eq1[1]) is False
        ma_eq0 = ma1_eq_ma2.all(0)
        assert_array_equal(ma_eq0.mask, np.array([False, True]))
        assert bool(ma_eq0[0]) is True
        assert bool(ma_eq0[1]) is False

    @pytest.mark.parametrize('method', ['any', 'all'])
    @pytest.mark.parametrize('array,axis', [
        ('a', 0), ('a', 1), ('a', None),
        ('b', None),
        ('c', 0), ('c', 1), ('c', None)])
    def test_all_and_any(self, array, axis, method):
        ma = getattr(self, 'm'+array)
        ma_eq = ma == ma
        ma_all_or_any = getattr(ma_eq, method)(axis=axis)
        filled = ma_eq.unmasked.copy()
        filled[ma_eq.mask] = method == 'all'
        a_all_or_any = getattr(filled, method)(axis=axis)
        all_masked = ma.mask.all(axis)
        assert_array_equal(ma_all_or_any.mask, all_masked)
        assert_array_equal(ma_all_or_any.unmasked, a_all_or_any)
        # interpretation as bool
        as_bool = [bool(a) for a in ma_all_or_any.ravel()]
        expected = [bool(a) for a in (a_all_or_any & ~all_masked).ravel()]
        assert as_bool == expected

    def test_any_inplace(self):
        ma_eq = self.ma == self.ma
        expected = ma_eq.any(1)
        out = Masked(np.zeros_like(expected.unmasked))
        result = ma_eq.any(1, out=out)
        assert result is out
        assert_masked_equal(result, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_diagonal(self, offset):
        mda = self.ma.diagonal(offset=offset)
        expected = Masked(self.a.diagonal(offset=offset),
                          self.mask_a.diagonal(offset=offset))
        assert_masked_equal(mda, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_trace(self, offset):
        mta = self.ma.trace(offset=offset)
        expected = Masked(self.a.trace(offset=offset),
                          self.mask_a.trace(offset=offset, dtype=bool))
        assert_masked_equal(mta, expected)

    def test_clip(self):
        maclip = self.ma.clip(self.b, self.c)
        expected = Masked(self.a.clip(self.b, self.c), self.mask_a)
        assert_masked_equal(maclip, expected)

    def test_clip_masked_min_max(self):
        maclip = self.ma.clip(self.mb, self.mc)
        # Need to be careful with min, max because of Longitude, which wraps.
dmax = np.maximum(np.maximum(self.a, self.b), self.c).max() dmin = np.minimum(np.minimum(self.a, self.b), self.c).min() expected = Masked(self.a.clip(self.mb.filled(dmin), self.mc.filled(dmax)), mask=self.mask_a) assert_masked_equal(maclip, expected) class TestMaskedQuantityMethods(TestMaskedArrayMethods, QuantitySetup): pass class TestMaskedLongitudeMethods(TestMaskedArrayMethods, LongitudeSetup): pass class TestMaskedArrayProductMethods(MaskedArraySetup): # These cannot work on Quantity, so done separately @pytest.mark.parametrize('axis', (0, 1, None)) def test_prod(self, axis): ma_sum = self.ma.prod(axis) expected_data = self.a.prod(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumprod(self, axis): ma_sum = self.ma.cumprod(axis) expected_data = self.a.cumprod(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) def test_masked_str_explicit(): sa = np.array([(1., 2.), (3., 4.)], dtype='f8,f8') msa = Masked(sa, [(False, True), (False, False)]) assert str(msa) == "[(1., ——) (3., 4.)]" assert str(msa[0]) == "(1., ——)" assert str(msa[1]) == "(3., 4.)" with np.printoptions(precision=3, floatmode='fixed'): assert str(msa) == "[(1.000, ———) (3.000, 4.000)]" def test_masked_repr_explicit(): # Use explicit endianness to ensure tests pass on all architectures sa = np.array([(1., 2.), (3., 4.)], dtype='>f8,>f8') msa = Masked(sa, [(False, True), (False, False)]) assert repr(msa) == ("MaskedNDArray([(1., ——), (3., 4.)], " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[0]) == ("MaskedNDArray((1., ——), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[1]) == ("MaskedNDArray((3., 4.), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") def test_masked_repr_summary(): ma = Masked(np.arange(15.), mask=[True]+[False]*14) with np.printoptions(threshold=2): assert repr(ma) == ( "MaskedNDArray([———, 1., 2., ..., 12., 13., 14.])") def test_masked_repr_nodata(): assert repr(Masked([])) == "MaskedNDArray([], dtype=float64)" class TestMaskedArrayRepr(MaskedArraySetup): def test_array_str(self): # very blunt check they work at all. 
str(self.ma) str(self.mb) str(self.mc) str(self.msa) str(self.msb) def test_scalar_str(self): assert self.mb[0].shape == () str(self.mb[0]) assert self.msb[0].shape == () str(self.msb[0]) def test_array_repr(self): repr(self.ma) repr(self.mb) repr(self.mc) repr(self.msa) repr(self.msb) def test_scalar_repr(self): repr(self.mb[0]) repr(self.msb[0]) class TestMaskedQuantityRepr(TestMaskedArrayRepr, QuantitySetup): pass class TestMaskedRecarray(MaskedArraySetup): @classmethod def setup_class(self): super().setup_class() self.ra = self.sa.view(np.recarray) self.mra = Masked(self.ra, mask=self.mask_sa) def test_recarray_setup(self): assert isinstance(self.mra, Masked) assert isinstance(self.mra, np.recarray) assert np.all(self.mra.unmasked == self.ra) assert np.all(self.mra.mask == self.mask_sa) assert_array_equal(self.mra.view(np.ndarray), self.sa) assert isinstance(self.mra.a, Masked) assert_array_equal(self.mra.a.unmasked, self.sa['a']) assert_array_equal(self.mra.a.mask, self.mask_sa['a']) def test_recarray_setting(self): mra = self.mra.copy() mra.a = self.msa['b'] assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_getting(self, attr): mra_a = self.mra.field(attr) assert isinstance(mra_a, Masked) assert_array_equal(mra_a.unmasked, self.sa['a']) assert_array_equal(mra_a.mask, self.mask_sa['a']) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_setting(self, attr): mra = self.mra.copy() mra.field(attr, self.msa['b']) assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask)
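

# Quick illustrative sketch of the behavior exercised by the tests above
# (hypothetical demo, not part of the original test suite): masks propagate
# through arithmetic and reductions, and ``unmasked``/``mask``/``filled()``
# expose the pieces.
if __name__ == '__main__':
    demo = Masked(np.array([1., 2., 3.]) * u.m, mask=[False, True, False])
    shifted = demo + 1. * u.m                     # mask is carried along unchanged
    assert np.all(shifted.mask == demo.mask)
    assert shifted.filled(0 * u.m)[1] == 0 * u.m  # masked entry replaced
    assert demo.sum().mask                        # any masked input masks the sum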
pllim/astropy
astropy/utils/masked/tests/test_masked.py
astropy/utils/shapes.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This file contains the high-level functions to read a VOTable file.
"""

# STDLIB
import io
import os
import sys
import textwrap
import warnings

# LOCAL
from . import exceptions
from . import tree
from astropy.utils.xml import iterparser
from astropy.utils import data
from astropy.utils.decorators import deprecated_renamed_argument
from astropy.utils.exceptions import AstropyDeprecationWarning


__all__ = ['parse', 'parse_single_table', 'from_table', 'writeto', 'validate',
           'reset_vo_warnings']

VERIFY_OPTIONS = ['ignore', 'warn', 'exception']


@deprecated_renamed_argument('pedantic', 'verify', pending=True, since='4.0')
def parse(source, columns=None, invalid='exception', verify=None,
          chunk_size=tree.DEFAULT_CHUNK_SIZE, table_number=None,
          table_id=None, filename=None, unit_format=None,
          datatype_mapping=None, _debug_python_based_parser=False):
    """
    Parses a VOTABLE_ xml file (or file-like object), and returns a
    `~astropy.io.votable.tree.VOTableFile` object.

    Parameters
    ----------
    source : path-like or file-like
        Path or file-like object containing a VOTABLE_ xml file.
        If a file, must be readable.

    columns : sequence of str, optional
        List of field names to include in the output.  The default is
        to include all fields.

    invalid : str, optional
        One of the following values:

            - 'exception': throw an exception when an invalid value is
              encountered (default)

            - 'mask': mask out invalid values

    verify : {'ignore', 'warn', 'exception'}, optional
        When ``'exception'``, raise an error when the file violates the
        spec; otherwise either issue a warning (``'warn'``) or silently
        continue (``'ignore'``).  Warnings may be controlled using the
        standard Python mechanisms; see the `warnings` module in the
        Python standard library for more information.  When not provided,
        uses the configuration setting ``astropy.io.votable.verify``,
        which defaults to 'ignore'.

        .. versionchanged:: 4.0
           ``verify`` replaces the ``pedantic`` argument, which will be
           deprecated in future.

    chunk_size : int, optional
        The number of rows to read before converting to an array.
        Higher numbers are likely to be faster, but will consume more
        memory.

    table_number : int, optional
        The number of the table in the file to read in.  If `None`, all
        tables will be read.  If a number, 0 refers to the first table
        in the file, and only that numbered table will be parsed and
        read in.  Should not be used with ``table_id``.

    table_id : str, optional
        The ID of the table in the file to read in.  Should not be
        used with ``table_number``.

    filename : str, optional
        A filename, URL or other identifier to use in error messages.
        If *filename* is None and *source* is a string (i.e. a path),
        then *source* will be used as a filename for error messages.
        Therefore, *filename* is only required when source is a
        file-like object.

    unit_format : str, astropy.units.format.Base instance or None, optional
        The unit format to use when parsing unit attributes.  If a
        string, must be the name of a unit formatter.  The built-in
        formats include ``generic``, ``fits``, ``cds``, and ``vounit``.
        A custom formatter may be provided by passing a
        `~astropy.units.format.Base` instance.  If `None` (default), the
        unit format to use will be the one specified by the VOTable
        specification (which is ``cds`` up to version 1.3 of VOTable,
        and ``vounit`` in more recent versions of the spec).

    datatype_mapping : dict, optional
        A mapping of datatype names (`str`) to valid VOTable datatype names
        (`str`).
    For example, if the file being read contains the datatype "unsignedInt"
    (an invalid datatype in VOTable), include the mapping
    ``{"unsignedInt": "long"}``.

    Returns
    -------
    votable : `~astropy.io.votable.tree.VOTableFile` object

    See also
    --------
    astropy.io.votable.exceptions : The exceptions this function may raise.
    """
    from . import conf

    invalid = invalid.lower()
    if invalid not in ('exception', 'mask'):
        raise ValueError("accepted values of ``invalid`` are: "
                         "``'exception'`` or ``'mask'``.")

    if verify is None:
        # NOTE: since the pedantic argument isn't fully deprecated yet, we need
        # to catch the deprecation warning that occurs when accessing the
        # configuration item, but only if it is for the pedantic option in the
        # [io.votable] section.
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore",
                r"Config parameter \'pedantic\' in section \[io.votable\]",
                AstropyDeprecationWarning)
            conf_verify_lowercase = conf.verify.lower()

        # We need to allow verify to be booleans as strings since the
        # configuration framework doesn't make it easy/possible to have mixed
        # types.
        if conf_verify_lowercase in ['false', 'true']:
            verify = conf_verify_lowercase == 'true'
        else:
            verify = conf_verify_lowercase

    if isinstance(verify, bool):
        verify = 'exception' if verify else 'warn'
    elif verify not in VERIFY_OPTIONS:
        raise ValueError(f"verify should be one of {'/'.join(VERIFY_OPTIONS)}")

    if datatype_mapping is None:
        datatype_mapping = {}

    config = {
        'columns': columns,
        'invalid': invalid,
        'verify': verify,
        'chunk_size': chunk_size,
        'table_number': table_number,
        'filename': filename,
        'unit_format': unit_format,
        'datatype_mapping': datatype_mapping
    }

    if filename is None and isinstance(source, str):
        config['filename'] = source

    with iterparser.get_xml_iterator(
            source,
            _debug_python_based_parser=_debug_python_based_parser) as iterator:
        return tree.VOTableFile(
            config=config, pos=(1, 1)).parse(iterator, config)


def parse_single_table(source, **kwargs):
    """
    Parses a VOTABLE_ xml file (or file-like object), reading and
    returning only the first `~astropy.io.votable.tree.Table` instance.

    See `parse` for a description of the keyword arguments.

    Returns
    -------
    votable : `~astropy.io.votable.tree.Table` object
    """
    if kwargs.get('table_number') is None:
        kwargs['table_number'] = 0

    votable = parse(source, **kwargs)

    return votable.get_first_table()


def writeto(table, file, tabledata_format=None):
    """
    Writes a `~astropy.io.votable.tree.VOTableFile` to a VOTABLE_ xml file.

    Parameters
    ----------
    table : `~astropy.io.votable.tree.VOTableFile` or `~astropy.table.Table` instance

    file : str or writable file-like
        Path or file object to write to.

    tabledata_format : str, optional
        Override the format of the table(s) data to write.  Must
        be one of ``tabledata`` (text representation), ``binary`` or
        ``binary2``.  By default, use the format that was specified in
        each ``table`` object as it was created or read in.  See
        :ref:`astropy:votable-serialization`.
    """
    from astropy.table import Table
    if isinstance(table, Table):
        table = tree.VOTableFile.from_table(table)
    elif not isinstance(table, tree.VOTableFile):
        raise TypeError(
            "first argument must be astropy.io.votable.tree.VOTableFile or "
            "astropy.table.Table instance")
    table.to_xml(file, tabledata_format=tabledata_format,
                 _debug_python_based_parser=True)


def validate(source, output=sys.stdout, xmllint=False, filename=None):
    """
    Prints a validation report for the given file.
    Parameters
    ----------
    source : path-like or file-like
        Path to a VOTABLE_ xml file, or a `~pathlib.Path` or file-like
        object pointing to one.  If a file-like object, it must be
        readable.

    output : file-like, optional
        Where to output the report.  Defaults to ``sys.stdout``.
        If `None`, the output will be returned as a string.
        Must be writable.

    xmllint : bool, optional
        When `True`, also send the file to ``xmllint`` for schema and
        DTD validation.  Requires that ``xmllint`` is installed.  The
        default is `False`.  ``source`` must be a file on the local
        filesystem in order for ``xmllint`` to work.

    filename : str, optional
        A filename to use in the error messages.  If not provided, one
        will be automatically determined from ``source``.

    Returns
    -------
    is_valid : bool or str
        Returns `True` if no warnings were found.  If ``output`` is
        `None`, the return value will be a string.
    """
    from astropy.utils.console import print_code_line, color_print

    return_as_str = False
    if output is None:
        # Collect the report in a buffer so it can be returned as a string.
        return_as_str = True
        output = io.StringIO()

    lines = []
    votable = None

    reset_vo_warnings()

    with data.get_readable_fileobj(source, encoding='binary') as fd:
        content = fd.read()
    content_buffer = io.BytesIO(content)
    content_buffer.seek(0)

    if filename is None:
        if isinstance(source, str):
            filename = source
        elif hasattr(source, 'name'):
            filename = source.name
        elif hasattr(source, 'url'):
            filename = source.url
        else:
            filename = "<unknown>"

    with warnings.catch_warnings(record=True) as warning_lines:
        warnings.resetwarnings()
        warnings.simplefilter("always", exceptions.VOWarning, append=True)
        try:
            votable = parse(content_buffer, verify='warn',
                            filename=filename)
        except ValueError as e:
            lines.append(str(e))

    lines = [str(x.message) for x in warning_lines
             if issubclass(x.category, exceptions.VOWarning)] + lines

    content_buffer.seek(0)
    output.write(f"Validation report for {filename}\n\n")

    if len(lines):
        xml_lines = iterparser.xml_readlines(content_buffer)

        for warning in lines:
            w = exceptions.parse_vowarning(warning)

            if not w['is_something']:
                output.write(w['message'])
                output.write('\n\n')
            else:
                line = xml_lines[w['nline'] - 1]
                warning = w['warning']
                if w['is_warning']:
                    color = 'yellow'
                else:
                    color = 'red'
                color_print(
                    f"{w['nline']:d}: ", '',
                    warning or 'EXC', color,
                    ': ', '',
                    textwrap.fill(
                        w['message'],
                        initial_indent='          ',
                        subsequent_indent='  ').lstrip(),
                    file=output)
                print_code_line(line, w['nchar'], file=output)
            output.write('\n')
    else:
        output.write('astropy.io.votable found no violations.\n\n')

    success = 0
    if xmllint and os.path.exists(filename):
        from . import xmlutil

        if votable is None:
            version = "1.1"
        else:
            version = votable.version
        success, stdout, stderr = xmlutil.validate_schema(
            filename, version)

        if success != 0:
            output.write(
                'xmllint schema violations:\n\n')
            output.write(stderr.decode('utf-8'))
        else:
            output.write('xmllint passed\n')

    if return_as_str:
        return output.getvalue()

    return len(lines) == 0 and success == 0


def from_table(table, table_id=None):
    """
    Given an `~astropy.table.Table` object, return a
    `~astropy.io.votable.tree.VOTableFile` file structure containing
    just that single table.

    Parameters
    ----------
    table : `~astropy.table.Table` instance

    table_id : str, optional
        If not `None`, set the given id on the returned
        `~astropy.io.votable.tree.Table` instance.

    Returns
    -------
    votable : `~astropy.io.votable.tree.VOTableFile` instance
    """
    return tree.VOTableFile.from_table(table, table_id=table_id)


def is_votable(source):
    """
    Reads the header of a file to determine if it is a VOTable file.
Parameters ---------- source : path-like or file-like Path or file object containing a VOTABLE_ xml file. If file, must be readable. Returns ------- is_votable : bool Returns `True` if the given file is a VOTable file. """ try: with iterparser.get_xml_iterator(source) as iterator: for start, tag, d, pos in iterator: if tag != 'xml': return False break for start, tag, d, pos in iterator: if tag != 'VOTABLE': return False break return True except ValueError: return False def reset_vo_warnings(): """ Resets all of the vo warning state so that warnings that have already been emitted will be emitted again. This is used, for example, by `validate` which must emit all warnings each time it is called. """ from . import converters, xmlutil # -----------------------------------------------------------# # This is a special variable used by the Python warnings # # infrastructure to keep track of warnings that have # # already been seen. Since we want to get every single # # warning out of this, we have to delete all of them first. # # -----------------------------------------------------------# for module in (converters, exceptions, tree, xmlutil): if hasattr(module, '__warningregistry__'): del module.__warningregistry__
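

# Illustrative round trip through the high-level API above (hypothetical
# demo, not part of the original module; the 'example.xml' filename is an
# assumption, and the file is created by the demo itself).
if __name__ == '__main__':
    from astropy.table import Table

    tab = Table({'a': [1, 2, 3]})
    writeto(from_table(tab, table_id='demo'), 'example.xml')
    assert is_votable('example.xml')
    votable = parse('example.xml')             # full VOTableFile tree
    first = parse_single_table('example.xml')  # just the first table
    assert len(first.array) == 3
    validate('example.xml')                    # report goes to sys.stdout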
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Test masked class initialization, methods, and operators. Functions, including ufuncs, are tested in test_functions.py """ import operator import numpy as np from numpy.testing import assert_array_equal import pytest from astropy import units as u from astropy.units import Quantity from astropy.coordinates import Longitude from astropy.utils.masked import Masked, MaskedNDArray def assert_masked_equal(a, b): assert_array_equal(a.unmasked, b.unmasked) assert_array_equal(a.mask, b.mask) VARIOUS_ITEMS = [ (1, 1), slice(None, 1), (), 1] class ArraySetup: _data_cls = np.ndarray @classmethod def setup_class(self): self.a = np.arange(6.).reshape(2, 3) self.mask_a = np.array([[True, False, False], [False, True, False]]) self.b = np.array([-3., -2., -1.]) self.mask_b = np.array([False, True, False]) self.c = np.array([[0.25], [0.5]]) self.mask_c = np.array([[False], [True]]) self.sdt = np.dtype([('a', 'f8'), ('b', 'f8')]) self.mask_sdt = np.dtype([('a', '?'), ('b', '?')]) self.sa = np.array([[(1., 2.), (3., 4.)], [(11., 12.), (13., 14.)]], dtype=self.sdt) self.mask_sa = np.array([[(True, True), (False, False)], [(False, True), (True, False)]], dtype=self.mask_sdt) self.sb = np.array([(1., 2.), (-3., 4.)], dtype=self.sdt) self.mask_sb = np.array([(True, False), (False, False)], dtype=self.mask_sdt) class QuantitySetup(ArraySetup): _data_cls = Quantity @classmethod def setup_class(self): super().setup_class() self.a = Quantity(self.a, u.m) self.b = Quantity(self.b, u.cm) self.c = Quantity(self.c, u.km) self.sa = Quantity(self.sa, u.m, dtype=self.sdt) self.sb = Quantity(self.sb, u.cm, dtype=self.sdt) class LongitudeSetup(ArraySetup): _data_cls = Longitude @classmethod def setup_class(self): super().setup_class() self.a = Longitude(self.a, u.deg) self.b = Longitude(self.b, u.deg) self.c = Longitude(self.c, u.deg) # Note: Longitude does not work on structured arrays, so # leaving it as regular array (which just reruns some tests). class TestMaskedArrayInitialization(ArraySetup): def test_simple(self): ma = Masked(self.a, mask=self.mask_a) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.a)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.a) assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_structured(self): ma = Masked(self.sa, mask=self.mask_sa) assert isinstance(ma, np.ndarray) assert isinstance(ma, type(self.sa)) assert isinstance(ma, Masked) assert_array_equal(ma.unmasked, self.sa) assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) def test_masked_ndarray_init(): # Note: as a straight ndarray subclass, MaskedNDArray passes on # the arguments relevant for np.ndarray, not np.array. a_in = np.arange(3, dtype=int) m_in = np.array([True, False, False]) buff = a_in.tobytes() # Check we're doing things correctly using regular ndarray. a = np.ndarray(shape=(3,), dtype=int, buffer=buff) assert_array_equal(a, a_in) # Check with and without mask. 
ma = MaskedNDArray((3,), dtype=int, mask=m_in, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, m_in) ma = MaskedNDArray((3,), dtype=int, buffer=buff) assert_array_equal(ma.unmasked, a_in) assert_array_equal(ma.mask, np.zeros(3, bool)) def test_cannot_initialize_with_masked(): with pytest.raises(ValueError, match='cannot handle np.ma.masked'): Masked(np.ma.masked) def test_cannot_just_use_anything_with_a_mask_attribute(): class my_array(np.ndarray): mask = True a = np.array([1., 2.]).view(my_array) with pytest.raises(AttributeError, match='unmasked'): Masked(a) class TestMaskedClassCreation: """Try creating a MaskedList and subclasses. By no means meant to be realistic, just to check that the basic machinery allows it. """ @classmethod def setup_class(self): self._base_classes_orig = Masked._base_classes.copy() self._masked_classes_orig = Masked._masked_classes.copy() class MaskedList(Masked, list, base_cls=list, data_cls=list): def __new__(cls, *args, mask=None, copy=False, **kwargs): self = super().__new__(cls) self._unmasked = self._data_cls(*args, **kwargs) self.mask = mask return self # Need to have shape for basics to work. @property def shape(self): return (len(self._unmasked),) self.MaskedList = MaskedList def teardown_class(self): Masked._base_classes = self._base_classes_orig Masked._masked_classes = self._masked_classes_orig def test_setup(self): assert issubclass(self.MaskedList, Masked) assert issubclass(self.MaskedList, list) assert Masked(list) is self.MaskedList def test_masked_list(self): ml = self.MaskedList(range(3), mask=[True, False, False]) assert ml.unmasked == [0, 1, 2] assert_array_equal(ml.mask, np.array([True, False, False])) ml01 = ml[:2] assert ml01.unmasked == [0, 1] assert_array_equal(ml01.mask, np.array([True, False])) def test_from_list(self): ml = Masked([1, 2, 3], mask=[True, False, False]) assert ml.unmasked == [1, 2, 3] assert_array_equal(ml.mask, np.array([True, False, False])) def test_masked_list_subclass(self): class MyList(list): pass ml = MyList(range(3)) mml = Masked(ml, mask=[False, True, False]) assert isinstance(mml, Masked) assert isinstance(mml, MyList) assert isinstance(mml.unmasked, MyList) assert mml.unmasked == [0, 1, 2] assert_array_equal(mml.mask, np.array([False, True, False])) assert Masked(MyList) is type(mml) class TestMaskedNDArraySubclassCreation: """Test that masked subclasses can be created directly and indirectly.""" @classmethod def setup_class(self): class MyArray(np.ndarray): def __new__(cls, *args, **kwargs): return np.asanyarray(*args, **kwargs).view(cls) self.MyArray = MyArray self.a = np.array([1., 2.]).view(self.MyArray) self.m = np.array([True, False], dtype=bool) def teardown_method(self, method): Masked._masked_classes.pop(self.MyArray, None) def test_direct_creation(self): assert self.MyArray not in Masked._masked_classes mcls = Masked(self.MyArray) assert issubclass(mcls, Masked) assert issubclass(mcls, self.MyArray) assert mcls.__name__ == 'MaskedMyArray' assert mcls.__doc__.startswith('Masked version of MyArray') mms = mcls(self.a, mask=self.m) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
mcls = Masked(self.MyArray) mms = mcls(self.a) assert isinstance(mms, mcls) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, np.zeros(mms.shape, bool)) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): mcls = Masked(self.MyArray) ma = masked_array(np.asarray(self.a), mask=self.m) mms = mcls(ma) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_indirect_creation(self): assert self.MyArray not in Masked._masked_classes mms = Masked(self.a, mask=self.m) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) assert self.MyArray in Masked._masked_classes assert Masked(self.MyArray) is type(mms) def test_can_initialize_with_masked_values(self): mcls = Masked(self.MyArray) mms = mcls(Masked(np.asarray(self.a), mask=self.m)) assert isinstance(mms, Masked) assert isinstance(mms, self.MyArray) assert_array_equal(mms.unmasked, self.a) assert_array_equal(mms.mask, self.m) def test_viewing(self): mms = Masked(self.a, mask=self.m) mms2 = mms.view() assert type(mms2) is mms.__class__ assert_masked_equal(mms2, mms) ma = mms.view(np.ndarray) assert type(ma) is MaskedNDArray assert_array_equal(ma.unmasked, self.a.view(np.ndarray)) assert_array_equal(ma.mask, self.m) class TestMaskedQuantityInitialization(TestMaskedArrayInitialization, QuantitySetup): def test_masked_quantity_class_init(self): # TODO: class definitions should be more easily accessible. mcls = Masked._masked_classes[self.a.__class__] # This is not a very careful test. mq = mcls([1., 2.], mask=[True, False], unit=u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.value.mask == [True, False]) assert np.all(mq.mask == [True, False]) def test_masked_quantity_getting(self): mcls = Masked._masked_classes[self.a.__class__] MQ = Masked(Quantity) assert MQ is mcls def test_initialization_without_mask(self): # Default for not giving a mask should be False. 
MQ = Masked(Quantity) mq = MQ([1., 2.], u.s) assert mq.unit == u.s assert np.all(mq.value.unmasked == [1., 2.]) assert np.all(mq.mask == [False, False]) @pytest.mark.parametrize('masked_array', [Masked, np.ma.MaskedArray]) def test_initialization_with_masked_values(self, masked_array): MQ = Masked(Quantity) a = np.array([1., 2.]) m = np.array([True, False]) ma = masked_array(a, m) mq = MQ(ma) assert isinstance(mq, Masked) assert isinstance(mq, Quantity) assert_array_equal(mq.value.unmasked, a) assert_array_equal(mq.mask, m) class TestMaskSetting(ArraySetup): def test_whole_mask_setting_simple(self): ma = Masked(self.a) assert ma.mask.shape == ma.shape assert not ma.mask.any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask.all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array([[True] * 3, [False] * 3])) ma.mask = self.mask_a assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_a) assert ma.mask is not self.mask_a assert np.may_share_memory(ma.mask, self.mask_a) def test_whole_mask_setting_structured(self): ma = Masked(self.sa) assert ma.mask.shape == ma.shape assert not ma.mask['a'].any() and not ma.mask['b'].any() ma.mask = True assert ma.mask.shape == ma.shape assert ma.mask['a'].all() and ma.mask['b'].all() ma.mask = [[True], [False]] assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, np.array( [[(True, True)] * 2, [(False, False)] * 2], dtype=self.mask_sdt)) ma.mask = self.mask_sa assert ma.mask.shape == ma.shape assert_array_equal(ma.mask, self.mask_sa) assert ma.mask is not self.mask_sa assert np.may_share_memory(ma.mask, self.mask_sa) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_part_mask_setting(self, item): ma = Masked(self.a) ma.mask[item] = True expected = np.zeros(ma.shape, bool) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, bool)) # Mask propagation mask = np.zeros(self.a.shape, bool) ma = Masked(self.a, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_part_mask_setting_structured(self, item): ma = Masked(self.sa) ma.mask[item] = True expected = np.zeros(ma.shape, self.mask_sdt) expected[item] = True assert_array_equal(ma.mask, expected) ma.mask[item] = False assert_array_equal(ma.mask, np.zeros(ma.shape, self.mask_sdt)) # Mask propagation mask = np.zeros(self.sa.shape, self.mask_sdt) ma = Masked(self.sa, mask) ma.mask[item] = True assert np.may_share_memory(ma.mask, mask) assert_array_equal(ma.mask, mask) # Following are tests where we trust the initializer works. class MaskedArraySetup(ArraySetup): @classmethod def setup_class(self): super().setup_class() self.ma = Masked(self.a, mask=self.mask_a) self.mb = Masked(self.b, mask=self.mask_b) self.mc = Masked(self.c, mask=self.mask_c) self.msa = Masked(self.sa, mask=self.mask_sa) self.msb = Masked(self.sb, mask=self.mask_sb) class TestViewing(MaskedArraySetup): def test_viewing_as_new_type(self): ma2 = self.ma.view(type(self.ma)) assert_masked_equal(ma2, self.ma) ma3 = self.ma.view() assert_masked_equal(ma3, self.ma) def test_viewing_as_new_dtype(self): # Not very meaningful, but possible... 
ma2 = self.ma.view('c8') assert_array_equal(ma2.unmasked, self.a.view('c8')) assert_array_equal(ma2.mask, self.mask_a) @pytest.mark.parametrize('new_dtype', ['2f4', 'f8,f8,f8']) def test_viewing_as_new_dtype_not_implemented(self, new_dtype): # But cannot (yet) view in a way that would need to create a new mask, # even though that view is possible for a regular array. check = self.a.view(new_dtype) with pytest.raises(NotImplementedError, match='different.*size'): self.ma.view(check.dtype) def test_viewing_as_something_impossible(self): with pytest.raises(TypeError): # Use intp to ensure we have the same size as object, # otherwise we get a different error message Masked(np.array([1, 2], dtype=np.intp)).view(Masked) class TestMaskedArrayCopyFilled(MaskedArraySetup): def test_copy(self): ma_copy = self.ma.copy() assert type(ma_copy) is type(self.ma) assert_array_equal(ma_copy.unmasked, self.ma.unmasked) assert_array_equal(ma_copy.mask, self.ma.mask) assert not np.may_share_memory(ma_copy.unmasked, self.ma.unmasked) assert not np.may_share_memory(ma_copy.mask, self.ma.mask) @pytest.mark.parametrize('fill_value', (0, 1)) def test_filled(self, fill_value): fill_value = fill_value * getattr(self.a, 'unit', 1) expected = self.a.copy() expected[self.ma.mask] = fill_value result = self.ma.filled(fill_value) assert_array_equal(expected, result) def test_filled_no_fill_value(self): with pytest.raises(TypeError, match='missing 1 required'): self.ma.filled() @pytest.mark.parametrize('fill_value', [(0, 1), (-1, -1)]) def test_filled_structured(self, fill_value): fill_value = np.array(fill_value, dtype=self.sdt) if hasattr(self.sa, 'unit'): fill_value = fill_value << self.sa.unit expected = self.sa.copy() expected['a'][self.msa.mask['a']] = fill_value['a'] expected['b'][self.msa.mask['b']] = fill_value['b'] result = self.msa.filled(fill_value) assert_array_equal(expected, result) def test_flat(self): ma_copy = self.ma.copy() ma_flat = ma_copy.flat # Check that single item keeps class and mask ma_flat1 = ma_flat[1] assert ma_flat1.unmasked == self.a.flat[1] assert ma_flat1.mask == self.mask_a.flat[1] # As well as getting items via iteration. assert all((ma.unmasked == a and ma.mask == m) for (ma, a, m) in zip(self.ma.flat, self.a.flat, self.mask_a.flat)) # Check that flat works like a view of the real array ma_flat[1] = self.b[1] assert ma_flat[1] == self.b[1] assert ma_copy[0, 1] == self.b[1] class TestMaskedQuantityCopyFilled(TestMaskedArrayCopyFilled, QuantitySetup): pass class TestMaskedLongitudeCopyFilled(TestMaskedArrayCopyFilled, LongitudeSetup): pass class TestMaskedArrayShaping(MaskedArraySetup): def test_reshape(self): ma_reshape = self.ma.reshape((6,)) expected_data = self.a.reshape((6,)) expected_mask = self.mask_a.reshape((6,)) assert ma_reshape.shape == expected_data.shape assert_array_equal(ma_reshape.unmasked, expected_data) assert_array_equal(ma_reshape.mask, expected_mask) def test_shape_setting(self): ma_reshape = self.ma.copy() ma_reshape.shape = 6, expected_data = self.a.reshape((6,)) expected_mask = self.mask_a.reshape((6,)) assert ma_reshape.shape == expected_data.shape assert_array_equal(ma_reshape.unmasked, expected_data) assert_array_equal(ma_reshape.mask, expected_mask) def test_shape_setting_failure(self): ma = self.ma.copy() with pytest.raises(ValueError, match='cannot reshape'): ma.shape = 5, assert ma.shape == self.ma.shape assert ma.mask.shape == self.ma.shape # Here, mask can be reshaped but array cannot. 
ma2 = Masked(np.broadcast_to([[1.], [2.]], self.a.shape), mask=self.mask_a) with pytest.raises(AttributeError, match='ncompatible shape'): ma2.shape = 6, assert ma2.shape == self.ma.shape assert ma2.mask.shape == self.ma.shape # Here, array can be reshaped but mask cannot. ma3 = Masked(self.a.copy(), mask=np.broadcast_to([[True], [False]], self.mask_a.shape)) with pytest.raises(AttributeError, match='ncompatible shape'): ma3.shape = 6, assert ma3.shape == self.ma.shape assert ma3.mask.shape == self.ma.shape def test_ravel(self): ma_ravel = self.ma.ravel() expected_data = self.a.ravel() expected_mask = self.mask_a.ravel() assert ma_ravel.shape == expected_data.shape assert_array_equal(ma_ravel.unmasked, expected_data) assert_array_equal(ma_ravel.mask, expected_mask) def test_transpose(self): ma_transpose = self.ma.transpose() expected_data = self.a.transpose() expected_mask = self.mask_a.transpose() assert ma_transpose.shape == expected_data.shape assert_array_equal(ma_transpose.unmasked, expected_data) assert_array_equal(ma_transpose.mask, expected_mask) def test_iter(self): for ma, d, m in zip(self.ma, self.a, self.mask_a): assert_array_equal(ma.unmasked, d) assert_array_equal(ma.mask, m) class MaskedItemTests(MaskedArraySetup): @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_getitem(self, item): ma_part = self.ma[item] expected_data = self.a[item] expected_mask = self.mask_a[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) def test_getitem_structured(self, item): ma_part = self.msa[item] expected_data = self.sa[item] expected_mask = self.mask_sa[item] assert_array_equal(ma_part.unmasked, expected_data) assert_array_equal(ma_part.mask, expected_mask) @pytest.mark.parametrize('indices,axis', [ ([0, 1], 1), ([0, 1], 0), ([0, 1], None), ([[0, 1], [2, 3]], None)]) def test_take(self, indices, axis): ma_take = self.ma.take(indices, axis=axis) expected_data = self.a.take(indices, axis=axis) expected_mask = self.mask_a.take(indices, axis=axis) assert_array_equal(ma_take.unmasked, expected_data) assert_array_equal(ma_take.mask, expected_mask) ma_take2 = np.take(self.ma, indices, axis=axis) assert_masked_equal(ma_take2, ma_take) @pytest.mark.parametrize('item', VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem(self, item, mask): base = self.ma.copy() expected_data = self.a.copy() expected_mask = self.mask_a.copy() value = self.a[0, 0] if mask is None else Masked(self.a[0, 0], mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', ['a'] + VARIOUS_ITEMS) @pytest.mark.parametrize('mask', [None, True, False]) def test_setitem_structured(self, item, mask): base = self.msa.copy() expected_data = self.sa.copy() expected_mask = self.mask_sa.copy() value = self.sa['b'] if item == 'a' else self.sa[0, 0] if mask is not None: value = Masked(value, mask) base[item] = value expected_data[item] = value if mask is None else value.unmasked expected_mask[item] = False if mask is None else value.mask assert_array_equal(base.unmasked, expected_data) assert_array_equal(base.mask, expected_mask) @pytest.mark.parametrize('item', VARIOUS_ITEMS) def test_setitem_np_ma_masked(self, item): base = self.ma.copy() expected_mask = self.mask_a.copy() 
base[item] = np.ma.masked expected_mask[item] = True assert_array_equal(base.unmasked, self.a) assert_array_equal(base.mask, expected_mask) class TestMaskedArrayItems(MaskedItemTests): @classmethod def setup_class(self): super().setup_class() self.d = np.array(['aa', 'bb']) self.mask_d = np.array([True, False]) self.md = Masked(self.d, self.mask_d) # Quantity, Longitude cannot hold strings. def test_getitem_strings(self): md = self.md.copy() md0 = md[0] assert md0.unmasked == self.d[0] assert md0.mask md_all = md[:] assert_masked_equal(md_all, md) def test_setitem_strings_np_ma_masked(self): md = self.md.copy() md[1] = np.ma.masked assert_array_equal(md.unmasked, self.d) assert_array_equal(md.mask, np.ones(2, bool)) class TestMaskedQuantityItems(MaskedItemTests, QuantitySetup): pass class TestMaskedLongitudeItems(MaskedItemTests, LongitudeSetup): pass class MaskedOperatorTests(MaskedArraySetup): @pytest.mark.parametrize('op', (operator.add, operator.sub)) def test_add_subtract(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that, e.g., # Longitude decays into an Angle. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality(self, op): mapmb = op(self.ma, self.mb) expected_data = op(self.a, self.b) expected_mask = (self.ma.mask | self.mb.mask) # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_not_implemented(self): with pytest.raises(TypeError): self.ma > 'abc' @pytest.mark.parametrize('different_names', [False, True]) @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_structured_equality(self, op, different_names): msb = self.msb if different_names: msb = msb.astype([(f'different_{name}', dt) for name, dt in msb.dtype.fields.items()]) mapmb = op(self.msa, self.msb) # Expected is a bit tricky here: only unmasked fields count expected_data = np.ones(mapmb.shape, bool) expected_mask = np.ones(mapmb.shape, bool) for field in self.sdt.names: fa, mfa = self.sa[field], self.mask_sa[field] fb, mfb = self.sb[field], self.mask_sb[field] mfequal = mfa | mfb fequal = (fa == fb) | mfequal expected_data &= fequal expected_mask &= mfequal if op is operator.ne: expected_data = ~expected_data # Note: assert_array_equal also checks type, i.e., that boolean # output is represented as plain Masked ndarray. 
assert_array_equal(mapmb.unmasked, expected_data) assert_array_equal(mapmb.mask, expected_mask) def test_matmul(self): result = self.ma.T @ self.ma assert_array_equal(result.unmasked, self.a.T @ self.a) mask1 = np.any(self.mask_a, axis=0) expected_mask = np.logical_or.outer(mask1, mask1) assert_array_equal(result.mask, expected_mask) result2 = self.ma.T @ self.a assert_array_equal(result2.unmasked, self.a.T @ self.a) expected_mask2 = np.logical_or.outer(mask1, np.zeros(3, bool)) assert_array_equal(result2.mask, expected_mask2) result3 = self.a.T @ self.ma assert_array_equal(result3.unmasked, self.a.T @ self.a) expected_mask3 = np.logical_or.outer(np.zeros(3, bool), mask1) assert_array_equal(result3.mask, expected_mask3) def test_matvec(self): result = self.ma @ self.mb assert np.all(result.mask) assert_array_equal(result.unmasked, self.a @ self.b) # Just using the masked vector still has all elements masked. result2 = self.a @ self.mb assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.a @ self.b) new_ma = self.ma.copy() new_ma.mask[0, 0] = False result3 = new_ma @ self.b assert_array_equal(result3.unmasked, self.a @ self.b) assert_array_equal(result3.mask, new_ma.mask.any(-1)) def test_vecmat(self): result = self.mb @ self.ma.T assert np.all(result.mask) assert_array_equal(result.unmasked, self.b @ self.a.T) result2 = self.b @ self.ma.T assert np.all(result2.mask) assert_array_equal(result2.unmasked, self.b @ self.a.T) new_ma = self.ma.T.copy() new_ma.mask[0, 0] = False result3 = self.b @ new_ma assert_array_equal(result3.unmasked, self.b @ self.a.T) assert_array_equal(result3.mask, new_ma.mask.any(0)) def test_vecvec(self): result = self.mb @ self.mb assert result.shape == () assert result.mask assert result.unmasked == self.b @ self.b mb_no_mask = Masked(self.b, False) result2 = mb_no_mask @ mb_no_mask assert not result2.mask class TestMaskedArrayOperators(MaskedOperatorTests): # Some further tests that use strings, which are not useful for Quantity. @pytest.mark.parametrize('op', (operator.eq, operator.ne)) def test_equality_strings(self, op): m1 = Masked(np.array(['a', 'b', 'c']), mask=[True, False, False]) m2 = Masked(np.array(['a', 'b', 'd']), mask=[False, False, False]) result = op(m1, m2) assert_array_equal(result.unmasked, op(m1.unmasked, m2.unmasked)) assert_array_equal(result.mask, m1.mask | m2.mask) result2 = op(m1, m2.unmasked) assert_masked_equal(result2, result) def test_not_implemented(self): with pytest.raises(TypeError): Masked(['a', 'b']) > object() class TestMaskedQuantityOperators(MaskedOperatorTests, QuantitySetup): pass class TestMaskedLongitudeOperators(MaskedOperatorTests, LongitudeSetup): pass class TestMaskedArrayMethods(MaskedArraySetup): def test_round(self): # Goes via ufunc, hence easy. 
mrc = self.mc.round() expected = Masked(self.c.round(), self.mask_c) assert_masked_equal(mrc, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_sum(self, axis): ma_sum = self.ma.sum(axis) expected_data = self.a.sum(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumsum(self, axis): ma_sum = self.ma.cumsum(axis) expected_data = self.a.cumsum(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_mean(self, axis): ma_mean = self.ma.mean(axis) filled = self.a.copy() filled[self.mask_a] = 0. count = 1 - self.ma.mask.astype(int) expected_data = filled.sum(axis) / count.sum(axis) expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_mean.unmasked, expected_data) assert_array_equal(ma_mean.mask, expected_mask) def test_mean_int16(self): ma = self.ma.astype('i2') ma_mean = ma.mean() assert ma_mean.dtype == 'f8' expected = ma.astype('f8').mean() assert_masked_equal(ma_mean, expected) def test_mean_float16(self): ma = self.ma.astype('f2') ma_mean = ma.mean() assert ma_mean.dtype == 'f2' expected = self.ma.mean().astype('f2') assert_masked_equal(ma_mean, expected) def test_mean_inplace(self): expected = self.ma.mean(1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.mean(1, out=out) assert result is out assert_masked_equal(out, expected) @pytest.mark.filterwarnings("ignore:.*true_divide.*") @pytest.mark.parametrize('axis', (0, 1, None)) def test_var(self, axis): ma_var = self.ma.var(axis) filled = (self.a - self.ma.mean(axis, keepdims=True))**2 filled[self.mask_a] = 0. 
count = (1 - self.ma.mask.astype(int)).sum(axis) expected_data = filled.sum(axis) / count expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_var.unmasked, expected_data) assert_array_equal(ma_var.mask, expected_mask) ma_var1 = self.ma.var(axis, ddof=1) expected_data1 = filled.sum(axis) / (count - 1) expected_mask1 = self.ma.mask.all(axis) | (count <= 1) assert_array_equal(ma_var1.unmasked, expected_data1) assert_array_equal(ma_var1.mask, expected_mask1) ma_var5 = self.ma.var(axis, ddof=5) assert np.all(~np.isfinite(ma_var5.unmasked)) assert ma_var5.mask.all() def test_var_int16(self): ma = self.ma.astype('i2') ma_var = ma.var() assert ma_var.dtype == 'f8' expected = ma.astype('f8').var() assert_masked_equal(ma_var, expected) def test_std(self): ma_std = self.ma.std(1, ddof=1) ma_var1 = self.ma.var(1, ddof=1) expected = np.sqrt(ma_var1) assert_masked_equal(ma_std, expected) def test_std_inplace(self): expected = self.ma.std(1, ddof=1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.std(1, ddof=1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_min(self, axis): ma_min = self.ma.min(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.min(axis) assert_array_equal(ma_min.unmasked, expected_data) assert not np.any(ma_min.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_max(self, axis): ma_max = self.ma.max(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.max(axis) assert_array_equal(ma_max.unmasked, expected_data) assert not np.any(ma_max.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmin(self, axis): ma_argmin = self.ma.argmin(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.argmin(axis) assert_array_equal(ma_argmin, expected_data) def test_argmin_only_one_unmasked_element(self): # Regression test for example from @taldcroft at # https://github.com/astropy/astropy/pull/11127#discussion_r600864559 ma = Masked(data=[1, 2], mask=[True, False]) assert ma.argmin() == 1 @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmax(self, axis): ma_argmax = self.ma.argmax(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.argmax(axis) assert_array_equal(ma_argmax, expected_data) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argsort(self, axis): ma_argsort = self.ma.argsort(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() * 1.1 expected_data = filled.argsort(axis) assert_array_equal(ma_argsort, expected_data) @pytest.mark.parametrize('order', [None, 'a', ('a', 'b'), ('b', 'a')]) @pytest.mark.parametrize('axis', [0, 1]) def test_structured_argsort(self, axis, order): ma_argsort = self.msa.argsort(axis, order=order) filled = self.msa.filled(fill_value=np.array((np.inf, np.inf), dtype=self.sdt)) expected_data = filled.argsort(axis, order=order) assert_array_equal(ma_argsort, expected_data) def test_argsort_error(self): with pytest.raises(ValueError, match='when the array has no fields'): self.ma.argsort(axis=0, order='a') @pytest.mark.parametrize('axis', (0, 1)) def test_sort(self, axis): ma_sort = self.ma.copy() ma_sort.sort(axis) indices = self.ma.argsort(axis) expected_data = np.take_along_axis(self.ma.unmasked, indices, axis) expected_mask = np.take_along_axis(self.ma.mask, indices, axis) assert_array_equal(ma_sort.unmasked, expected_data) assert_array_equal(ma_sort.mask, expected_mask) 
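# Standalone sketch, separate from the test class above: the reduction
# semantics that test_sum and test_mean encode differ in how the mask
# propagates, which a tiny REPL-style example makes concrete. It assumes
# only numpy and astropy.utils.masked.Masked, as imported at the top of
# this file; the variable name is illustrative.
example = Masked(np.arange(6.).reshape(2, 3),
                 mask=[[True, False, False], [False, True, False]])
print(example.sum(axis=0).mask)   # [ True  True False] -- sum is masked if ANY input is masked
print(example.mean(axis=0).mask)  # [False False False] -- mean is masked only if ALL inputs are masked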
@pytest.mark.parametrize('kth', [1, 3]) def test_argpartition(self, kth): ma = self.ma.ravel() ma_argpartition = ma.argpartition(kth) partitioned = ma[ma_argpartition] assert (partitioned[:kth] < partitioned[kth]).all() assert (partitioned[kth:] >= partitioned[kth]).all() if partitioned[kth].mask: assert all(partitioned.mask[kth:]) else: assert not any(partitioned.mask[:kth]) @pytest.mark.parametrize('kth', [1, 3]) def test_partition(self, kth): partitioned = self.ma.flatten() partitioned.partition(kth) assert (partitioned[:kth] < partitioned[kth]).all() assert (partitioned[kth:] >= partitioned[kth]).all() if partitioned[kth].mask: assert all(partitioned.mask[kth:]) else: assert not any(partitioned.mask[:kth]) def test_all_explicit(self): a1 = np.array([[1., 2.], [3., 4.]]) a2 = np.array([[1., 0.], [3., 4.]]) if self._data_cls is not np.ndarray: a1 = self._data_cls(a1, self.a.unit) a2 = self._data_cls(a2, self.a.unit) ma1 = Masked(a1, mask=[[False, False], [True, True]]) ma2 = Masked(a2, mask=[[False, True], [False, True]]) ma1_eq_ma2 = ma1 == ma2 assert_array_equal(ma1_eq_ma2.unmasked, np.array([[True, False], [True, True]])) assert_array_equal(ma1_eq_ma2.mask, np.array([[False, True], [True, True]])) assert ma1_eq_ma2.all() assert not (ma1 != ma2).all() ma_eq1 = ma1_eq_ma2.all(1) assert_array_equal(ma_eq1.mask, np.array([False, True])) assert bool(ma_eq1[0]) is True assert bool(ma_eq1[1]) is False ma_eq0 = ma1_eq_ma2.all(0) assert_array_equal(ma_eq0.mask, np.array([False, True])) assert bool(ma_eq0[0]) is True assert bool(ma_eq0[1]) is False @pytest.mark.parametrize('method', ['any', 'all']) @pytest.mark.parametrize('array,axis', [ ('a', 0), ('a', 1), ('a', None), ('b', None), ('c', 0), ('c', 1), ('c', None)]) def test_all_and_any(self, array, axis, method): ma = getattr(self, 'm'+array) ma_eq = ma == ma ma_all_or_any = getattr(ma_eq, method)(axis=axis) filled = ma_eq.unmasked.copy() filled[ma_eq.mask] = method == 'all' a_all_or_any = getattr(filled, method)(axis=axis) all_masked = ma.mask.all(axis) assert_array_equal(ma_all_or_any.mask, all_masked) assert_array_equal(ma_all_or_any.unmasked, a_all_or_any) # Interpretation as bool as_bool = [bool(a) for a in ma_all_or_any.ravel()] expected = [bool(a) for a in (a_all_or_any & ~all_masked).ravel()] assert as_bool == expected def test_any_inplace(self): ma_eq = self.ma == self.ma expected = ma_eq.any(1) out = Masked(np.zeros_like(expected.unmasked)) result = ma_eq.any(1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('offset', (0, 1)) def test_diagonal(self, offset): mda = self.ma.diagonal(offset=offset) expected = Masked(self.a.diagonal(offset=offset), self.mask_a.diagonal(offset=offset)) assert_masked_equal(mda, expected) @pytest.mark.parametrize('offset', (0, 1)) def test_trace(self, offset): mta = self.ma.trace(offset=offset) expected = Masked(self.a.trace(offset=offset), self.mask_a.trace(offset=offset, dtype=bool)) assert_masked_equal(mta, expected) def test_clip(self): maclip = self.ma.clip(self.b, self.c) expected = Masked(self.a.clip(self.b, self.c), self.mask_a) assert_masked_equal(maclip, expected) def test_clip_masked_min_max(self): maclip = self.ma.clip(self.mb, self.mc) # Need to be careful with min, max because of Longitude, which wraps. 
dmax = np.maximum(np.maximum(self.a, self.b), self.c).max() dmin = np.minimum(np.minimum(self.a, self.b), self.c).min() expected = Masked(self.a.clip(self.mb.filled(dmin), self.mc.filled(dmax)), mask=self.mask_a) assert_masked_equal(maclip, expected) class TestMaskedQuantityMethods(TestMaskedArrayMethods, QuantitySetup): pass class TestMaskedLongitudeMethods(TestMaskedArrayMethods, LongitudeSetup): pass class TestMaskedArrayProductMethods(MaskedArraySetup): # These cannot work on Quantity, so done separately @pytest.mark.parametrize('axis', (0, 1, None)) def test_prod(self, axis): ma_sum = self.ma.prod(axis) expected_data = self.a.prod(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumprod(self, axis): ma_sum = self.ma.cumprod(axis) expected_data = self.a.cumprod(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) def test_masked_str_explicit(): sa = np.array([(1., 2.), (3., 4.)], dtype='f8,f8') msa = Masked(sa, [(False, True), (False, False)]) assert str(msa) == "[(1., ——) (3., 4.)]" assert str(msa[0]) == "(1., ——)" assert str(msa[1]) == "(3., 4.)" with np.printoptions(precision=3, floatmode='fixed'): assert str(msa) == "[(1.000, ———) (3.000, 4.000)]" def test_masked_repr_explicit(): # Use explicit endianness to ensure tests pass on all architectures sa = np.array([(1., 2.), (3., 4.)], dtype='>f8,>f8') msa = Masked(sa, [(False, True), (False, False)]) assert repr(msa) == ("MaskedNDArray([(1., ——), (3., 4.)], " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[0]) == ("MaskedNDArray((1., ——), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[1]) == ("MaskedNDArray((3., 4.), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") def test_masked_repr_summary(): ma = Masked(np.arange(15.), mask=[True]+[False]*14) with np.printoptions(threshold=2): assert repr(ma) == ( "MaskedNDArray([———, 1., 2., ..., 12., 13., 14.])") def test_masked_repr_nodata(): assert repr(Masked([])) == "MaskedNDArray([], dtype=float64)" class TestMaskedArrayRepr(MaskedArraySetup): def test_array_str(self): # very blunt check they work at all. 
str(self.ma) str(self.mb) str(self.mc) str(self.msa) str(self.msb) def test_scalar_str(self): assert self.mb[0].shape == () str(self.mb[0]) assert self.msb[0].shape == () str(self.msb[0]) def test_array_repr(self): repr(self.ma) repr(self.mb) repr(self.mc) repr(self.msa) repr(self.msb) def test_scalar_repr(self): repr(self.mb[0]) repr(self.msb[0]) class TestMaskedQuantityRepr(TestMaskedArrayRepr, QuantitySetup): pass class TestMaskedRecarray(MaskedArraySetup): @classmethod def setup_class(self): super().setup_class() self.ra = self.sa.view(np.recarray) self.mra = Masked(self.ra, mask=self.mask_sa) def test_recarray_setup(self): assert isinstance(self.mra, Masked) assert isinstance(self.mra, np.recarray) assert np.all(self.mra.unmasked == self.ra) assert np.all(self.mra.mask == self.mask_sa) assert_array_equal(self.mra.view(np.ndarray), self.sa) assert isinstance(self.mra.a, Masked) assert_array_equal(self.mra.a.unmasked, self.sa['a']) assert_array_equal(self.mra.a.mask, self.mask_sa['a']) def test_recarray_setting(self): mra = self.mra.copy() mra.a = self.msa['b'] assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_getting(self, attr): mra_a = self.mra.field(attr) assert isinstance(mra_a, Masked) assert_array_equal(mra_a.unmasked, self.sa['a']) assert_array_equal(mra_a.mask, self.mask_sa['a']) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_setting(self, attr): mra = self.mra.copy() mra.field(attr, self.msa['b']) assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask)
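# Hedged usage sketch recapping two behaviours the suite above verifies:
# assigning np.ma.masked through __setitem__ flips only the mask (the data
# are left untouched), and filled() substitutes the fill value at masked
# entries. Variable names here are illustrative, not part of the suite.
import numpy as np
from astropy.utils.masked import Masked

ma = Masked(np.array([1., 2., 3.]), mask=[False, False, False])
ma[0] = np.ma.masked
print(ma.unmasked, ma.mask)  # [1. 2. 3.] [ True False False]
print(ma.filled(-1.))        # [-1.  2.  3.]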
pllim/astropy
astropy/utils/masked/tests/test_masked.py
astropy/io/votable/table.py
import numbers from collections import defaultdict import numpy as np from astropy.utils import isiterable from astropy.utils.decorators import lazyproperty from ..low_level_api import BaseLowLevelWCS from .base import BaseWCSWrapper __all__ = ['sanitize_slices', 'SlicedLowLevelWCS'] def sanitize_slices(slices, ndim): """ Given a slice as input, sanitise it to an easier to parse format. This function returns a list ``ndim`` long containing slice objects (or ints). """ if not isinstance(slices, (tuple, list)): # We just have a single int slices = (slices,) if len(slices) > ndim: raise ValueError( f"The dimensionality of the specified slice {slices} can not be greater " f"than the dimensionality ({ndim}) of the wcs.") if any((isiterable(s) for s in slices)): raise IndexError("This slice is invalid, only integer or range slices are supported.") slices = list(slices) if Ellipsis in slices: if slices.count(Ellipsis) > 1: raise IndexError("an index can only have a single ellipsis ('...')") # Replace the Ellipsis with the correct number of slice(None)s e_ind = slices.index(Ellipsis) slices.remove(Ellipsis) n_e = ndim - len(slices) for i in range(n_e): ind = e_ind + i slices.insert(ind, slice(None)) for i in range(ndim): if i < len(slices): slc = slices[i] if isinstance(slc, slice): if slc.step and slc.step != 1: raise IndexError("Slicing WCS with a step is not supported.") elif not isinstance(slc, numbers.Integral): raise IndexError("Only integer or range slices are accepted.") else: slices.append(slice(None)) return slices def combine_slices(slice1, slice2): """ Given two slices that can be applied to a 1-d array, find the resulting slice that corresponds to the combination of both slices. We assume that slice2 can be an integer, but slice1 cannot. """ if isinstance(slice1, slice) and slice1.step is not None: raise ValueError('Only slices with steps of 1 are supported') if isinstance(slice2, slice) and slice2.step is not None: raise ValueError('Only slices with steps of 1 are supported') if isinstance(slice2, numbers.Integral): if slice1.start is None: return slice2 else: return slice2 + slice1.start if slice1.start is None: if slice1.stop is None: return slice2 else: if slice2.stop is None: return slice(slice2.start, slice1.stop) else: return slice(slice2.start, min(slice1.stop, slice2.stop)) else: if slice2.start is None: start = slice1.start else: start = slice1.start + slice2.start if slice2.stop is None: stop = slice1.stop else: if slice1.start is None: stop = slice2.stop else: stop = slice2.stop + slice1.start if slice1.stop is not None: stop = min(slice1.stop, stop) return slice(start, stop) class SlicedLowLevelWCS(BaseWCSWrapper): """ A Low Level WCS wrapper which applies an array slice to a WCS. This class does not modify the underlying WCS object and can therefore drop coupled dimensions as it stores which pixel and world dimensions have been sliced out (or modified) in the underlying WCS and returns the modified results on all the Low Level WCS methods. Parameters ---------- wcs : `~astropy.wcs.wcsapi.BaseLowLevelWCS` The WCS to slice. slices : `slice` or `tuple` or `int` A valid array slice to apply to the WCS. 
""" def __init__(self, wcs, slices): slices = sanitize_slices(slices, wcs.pixel_n_dim) if isinstance(wcs, SlicedLowLevelWCS): # Here we combine the current slices with the previous slices # to avoid ending up with many nested WCSes self._wcs = wcs._wcs slices_original = wcs._slices_array.copy() for ipixel in range(wcs.pixel_n_dim): ipixel_orig = wcs._wcs.pixel_n_dim - 1 - wcs._pixel_keep[ipixel] ipixel_new = wcs.pixel_n_dim - 1 - ipixel slices_original[ipixel_orig] = combine_slices(slices_original[ipixel_orig], slices[ipixel_new]) self._slices_array = slices_original else: self._wcs = wcs self._slices_array = slices self._slices_pixel = self._slices_array[::-1] # figure out which pixel dimensions have been kept, then use axis correlation # matrix to figure out which world dims are kept self._pixel_keep = np.nonzero([not isinstance(self._slices_pixel[ip], numbers.Integral) for ip in range(self._wcs.pixel_n_dim)])[0] # axis_correlation_matrix[world, pixel] self._world_keep = np.nonzero( self._wcs.axis_correlation_matrix[:, self._pixel_keep].any(axis=1))[0] if len(self._pixel_keep) == 0 or len(self._world_keep) == 0: raise ValueError("Cannot slice WCS: the resulting WCS should have " "at least one pixel and one world dimension.") @lazyproperty def dropped_world_dimensions(self): """ Information describing the dropped world dimensions. """ world_coords = self._pixel_to_world_values_all(*[0]*len(self._pixel_keep)) dropped_info = defaultdict(list) for i in range(self._wcs.world_n_dim): if i in self._world_keep: continue if "world_axis_object_classes" not in dropped_info: dropped_info["world_axis_object_classes"] = dict() wao_classes = self._wcs.world_axis_object_classes wao_components = self._wcs.world_axis_object_components dropped_info["value"].append(world_coords[i]) dropped_info["world_axis_names"].append(self._wcs.world_axis_names[i]) dropped_info["world_axis_physical_types"].append(self._wcs.world_axis_physical_types[i]) dropped_info["world_axis_units"].append(self._wcs.world_axis_units[i]) dropped_info["world_axis_object_components"].append(wao_components[i]) dropped_info["world_axis_object_classes"].update(dict( filter( lambda x: x[0] == wao_components[i][0], wao_classes.items() ) )) dropped_info["serialized_classes"] = self.serialized_classes return dict(dropped_info) @property def pixel_n_dim(self): return len(self._pixel_keep) @property def world_n_dim(self): return len(self._world_keep) @property def world_axis_physical_types(self): return [self._wcs.world_axis_physical_types[i] for i in self._world_keep] @property def world_axis_units(self): return [self._wcs.world_axis_units[i] for i in self._world_keep] @property def pixel_axis_names(self): return [self._wcs.pixel_axis_names[i] for i in self._pixel_keep] @property def world_axis_names(self): return [self._wcs.world_axis_names[i] for i in self._world_keep] def _pixel_to_world_values_all(self, *pixel_arrays): pixel_arrays = tuple(map(np.asanyarray, pixel_arrays)) pixel_arrays_new = [] ipix_curr = -1 for ipix in range(self._wcs.pixel_n_dim): if isinstance(self._slices_pixel[ipix], int): pixel_arrays_new.append(self._slices_pixel[ipix]) else: ipix_curr += 1 if self._slices_pixel[ipix].start is not None: pixel_arrays_new.append(pixel_arrays[ipix_curr] + self._slices_pixel[ipix].start) else: pixel_arrays_new.append(pixel_arrays[ipix_curr]) pixel_arrays_new = np.broadcast_arrays(*pixel_arrays_new) return self._wcs.pixel_to_world_values(*pixel_arrays_new) def pixel_to_world_values(self, *pixel_arrays): world_arrays = 
self._pixel_to_world_values_all(*pixel_arrays) # Detect the case of a length 0 array if isinstance(world_arrays, np.ndarray) and not world_arrays.shape: return world_arrays if self._wcs.world_n_dim > 1: # Select the dimensions of the original WCS we are keeping. world_arrays = [world_arrays[iw] for iw in self._world_keep] # If there is only one world dimension (after slicing) we shouldn't return a tuple. if self.world_n_dim == 1: world_arrays = world_arrays[0] return world_arrays def world_to_pixel_values(self, *world_arrays): world_arrays = tuple(map(np.asanyarray, world_arrays)) world_arrays_new = [] iworld_curr = -1 for iworld in range(self._wcs.world_n_dim): if iworld in self._world_keep: iworld_curr += 1 world_arrays_new.append(world_arrays[iworld_curr]) else: world_arrays_new.append(1.) world_arrays_new = np.broadcast_arrays(*world_arrays_new) pixel_arrays = list(self._wcs.world_to_pixel_values(*world_arrays_new)) for ipixel in range(self._wcs.pixel_n_dim): if isinstance(self._slices_pixel[ipixel], slice) and self._slices_pixel[ipixel].start is not None: pixel_arrays[ipixel] -= self._slices_pixel[ipixel].start # Detect the case of a length 0 array if isinstance(pixel_arrays, np.ndarray) and not pixel_arrays.shape: return pixel_arrays pixel = tuple(pixel_arrays[ip] for ip in self._pixel_keep) if self.pixel_n_dim == 1 and self._wcs.pixel_n_dim > 1: pixel = pixel[0] return pixel @property def world_axis_object_components(self): return [self._wcs.world_axis_object_components[idx] for idx in self._world_keep] @property def world_axis_object_classes(self): keys_keep = [item[0] for item in self.world_axis_object_components] return dict([item for item in self._wcs.world_axis_object_classes.items() if item[0] in keys_keep]) @property def array_shape(self): if self._wcs.array_shape: return np.broadcast_to(0, self._wcs.array_shape)[tuple(self._slices_array)].shape @property def pixel_shape(self): if self.array_shape: return tuple(self.array_shape[::-1]) @property def pixel_bounds(self): if self._wcs.pixel_bounds is None: return bounds = [] for idx in self._pixel_keep: if self._slices_pixel[idx].start is None: bounds.append(self._wcs.pixel_bounds[idx]) else: imin, imax = self._wcs.pixel_bounds[idx] start = self._slices_pixel[idx].start bounds.append((imin - start, imax - start)) return tuple(bounds) @property def axis_correlation_matrix(self): return self._wcs.axis_correlation_matrix[self._world_keep][:, self._pixel_keep]
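# Hedged sketch, not part of the module above: what sanitize_slices and
# combine_slices return for simple inputs (matching their docstrings), and
# a minimal SlicedLowLevelWCS construction assuming a trivial 2-d
# astropy.wcs.WCS as the underlying low-level WCS. Assumes the functions
# and class above are in scope (e.g. run at the bottom of this module).
from astropy.wcs import WCS

print(sanitize_slices(0, 3))                     # [0, slice(None, None, None), slice(None, None, None)]
print(sanitize_slices((Ellipsis, 2), 3))         # [slice(None, None, None), slice(None, None, None), 2]
print(combine_slices(slice(1, 8), slice(2, 5)))  # slice(3, 6, None)
print(combine_slices(slice(1, None), 4))         # 5

# Array-order slices: the second entry fixes the last pixel dimension.
w = SlicedLowLevelWCS(WCS(naxis=2), (slice(None), 2))
print(w.pixel_n_dim, w.world_n_dim)              # 1 1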
mrc = self.mc.round() expected = Masked(self.c.round(), self.mask_c) assert_masked_equal(mrc, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_sum(self, axis): ma_sum = self.ma.sum(axis) expected_data = self.a.sum(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumsum(self, axis): ma_sum = self.ma.cumsum(axis) expected_data = self.a.cumsum(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_mean(self, axis): ma_mean = self.ma.mean(axis) filled = self.a.copy() filled[self.mask_a] = 0. count = 1 - self.ma.mask.astype(int) expected_data = filled.sum(axis) / count.sum(axis) expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_mean.unmasked, expected_data) assert_array_equal(ma_mean.mask, expected_mask) def test_mean_int16(self): ma = self.ma.astype('i2') ma_mean = ma.mean() assert ma_mean.dtype == 'f8' expected = ma.astype('f8').mean() assert_masked_equal(ma_mean, expected) def test_mean_float16(self): ma = self.ma.astype('f2') ma_mean = ma.mean() assert ma_mean.dtype == 'f2' expected = self.ma.mean().astype('f2') assert_masked_equal(ma_mean, expected) def test_mean_inplace(self): expected = self.ma.mean(1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.mean(1, out=out) assert result is out assert_masked_equal(out, expected) @pytest.mark.filterwarnings("ignore:.*true_divide.*") @pytest.mark.parametrize('axis', (0, 1, None)) def test_var(self, axis): ma_var = self.ma.var(axis) filled = (self.a - self.ma.mean(axis, keepdims=True))**2 filled[self.mask_a] = 0. 
count = (1 - self.ma.mask.astype(int)).sum(axis) expected_data = filled.sum(axis) / count expected_mask = self.ma.mask.all(axis) assert_array_equal(ma_var.unmasked, expected_data) assert_array_equal(ma_var.mask, expected_mask) ma_var1 = self.ma.var(axis, ddof=1) expected_data1 = filled.sum(axis) / (count - 1) expected_mask1 = self.ma.mask.all(axis) | (count <= 1) assert_array_equal(ma_var1.unmasked, expected_data1) assert_array_equal(ma_var1.mask, expected_mask1) ma_var5 = self.ma.var(axis, ddof=5) assert np.all(~np.isfinite(ma_var5.unmasked)) assert ma_var5.mask.all() def test_var_int16(self): ma = self.ma.astype('i2') ma_var = ma.var() assert ma_var.dtype == 'f8' expected = ma.astype('f8').var() assert_masked_equal(ma_var, expected) def test_std(self): ma_std = self.ma.std(1, ddof=1) ma_var1 = self.ma.var(1, ddof=1) expected = np.sqrt(ma_var1) assert_masked_equal(ma_std, expected) def test_std_inplace(self): expected = self.ma.std(1, ddof=1) out = Masked(np.zeros_like(expected.unmasked)) result = self.ma.std(1, ddof=1, out=out) assert result is out assert_masked_equal(result, expected) @pytest.mark.parametrize('axis', (0, 1, None)) def test_min(self, axis): ma_min = self.ma.min(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.min(axis) assert_array_equal(ma_min.unmasked, expected_data) assert not np.any(ma_min.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_max(self, axis): ma_max = self.ma.max(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.max(axis) assert_array_equal(ma_max.unmasked, expected_data) assert not np.any(ma_max.mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmin(self, axis): ma_argmin = self.ma.argmin(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() expected_data = filled.argmin(axis) assert_array_equal(ma_argmin, expected_data) def test_argmin_only_one_unmasked_element(self): # Regression test for example from @taldcroft at # https://github.com/astropy/astropy/pull/11127#discussion_r600864559 ma = Masked(data=[1, 2], mask=[True, False]) assert ma.argmin() == 1 @pytest.mark.parametrize('axis', (0, 1, None)) def test_argmax(self, axis): ma_argmax = self.ma.argmax(axis) filled = self.a.copy() filled[self.mask_a] = self.a.min() expected_data = filled.argmax(axis) assert_array_equal(ma_argmax, expected_data) @pytest.mark.parametrize('axis', (0, 1, None)) def test_argsort(self, axis): ma_argsort = self.ma.argsort(axis) filled = self.a.copy() filled[self.mask_a] = self.a.max() * 1.1 expected_data = filled.argsort(axis) assert_array_equal(ma_argsort, expected_data) @pytest.mark.parametrize('order', [None, 'a', ('a', 'b'), ('b', 'a')]) @pytest.mark.parametrize('axis', [0, 1]) def test_structured_argsort(self, axis, order): ma_argsort = self.msa.argsort(axis, order=order) filled = self.msa.filled(fill_value=np.array((np.inf, np.inf), dtype=self.sdt)) expected_data = filled.argsort(axis, order=order) assert_array_equal(ma_argsort, expected_data) def test_argsort_error(self): with pytest.raises(ValueError, match='when the array has no fields'): self.ma.argsort(axis=0, order='a') @pytest.mark.parametrize('axis', (0, 1)) def test_sort(self, axis): ma_sort = self.ma.copy() ma_sort.sort(axis) indices = self.ma.argsort(axis) expected_data = np.take_along_axis(self.ma.unmasked, indices, axis) expected_mask = np.take_along_axis(self.ma.mask, indices, axis) assert_array_equal(ma_sort.unmasked, expected_data) assert_array_equal(ma_sort.mask, expected_mask) 
    @pytest.mark.parametrize('kth', [1, 3])
    def test_argpartition(self, kth):
        ma = self.ma.ravel()
        ma_argpartition = ma.argpartition(kth)
        partitioned = ma[ma_argpartition]
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    @pytest.mark.parametrize('kth', [1, 3])
    def test_partition(self, kth):
        partitioned = self.ma.flatten()
        partitioned.partition(kth)
        assert (partitioned[:kth] < partitioned[kth]).all()
        assert (partitioned[kth:] >= partitioned[kth]).all()
        if partitioned[kth].mask:
            assert all(partitioned.mask[kth:])
        else:
            assert not any(partitioned.mask[:kth])

    def test_all_explicit(self):
        a1 = np.array([[1., 2.], [3., 4.]])
        a2 = np.array([[1., 0.], [3., 4.]])
        if self._data_cls is not np.ndarray:
            a1 = self._data_cls(a1, self.a.unit)
            a2 = self._data_cls(a2, self.a.unit)
        ma1 = Masked(a1, mask=[[False, False], [True, True]])
        ma2 = Masked(a2, mask=[[False, True], [False, True]])
        ma1_eq_ma2 = ma1 == ma2
        assert_array_equal(ma1_eq_ma2.unmasked, np.array([[True, False],
                                                          [True, True]]))
        assert_array_equal(ma1_eq_ma2.mask, np.array([[False, True],
                                                      [True, True]]))
        assert ma1_eq_ma2.all()
        assert not (ma1 != ma2).all()
        ma_eq1 = ma1_eq_ma2.all(1)
        assert_array_equal(ma_eq1.mask, np.array([False, True]))
        assert bool(ma_eq1[0]) is True
        assert bool(ma_eq1[1]) is False
        ma_eq0 = ma1_eq_ma2.all(0)
        assert_array_equal(ma_eq0.mask, np.array([False, True]))
        assert bool(ma_eq0[0]) is True
        assert bool(ma_eq0[1]) is False

    @pytest.mark.parametrize('method', ['any', 'all'])
    @pytest.mark.parametrize('array,axis', [
        ('a', 0), ('a', 1), ('a', None),
        ('b', None),
        ('c', 0), ('c', 1), ('c', None)])
    def test_all_and_any(self, array, axis, method):
        ma = getattr(self, 'm'+array)
        ma_eq = ma == ma
        ma_all_or_any = getattr(ma_eq, method)(axis=axis)
        filled = ma_eq.unmasked.copy()
        filled[ma_eq.mask] = method == 'all'
        a_all_or_any = getattr(filled, method)(axis=axis)
        all_masked = ma.mask.all(axis)
        assert_array_equal(ma_all_or_any.mask, all_masked)
        assert_array_equal(ma_all_or_any.unmasked, a_all_or_any)
        # interpretation as bool
        as_bool = [bool(a) for a in ma_all_or_any.ravel()]
        expected = [bool(a) for a in (a_all_or_any & ~all_masked).ravel()]
        assert as_bool == expected

    def test_any_inplace(self):
        ma_eq = self.ma == self.ma
        expected = ma_eq.any(1)
        out = Masked(np.zeros_like(expected.unmasked))
        result = ma_eq.any(1, out=out)
        assert result is out
        assert_masked_equal(result, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_diagonal(self, offset):
        mda = self.ma.diagonal(offset=offset)
        expected = Masked(self.a.diagonal(offset=offset),
                          self.mask_a.diagonal(offset=offset))
        assert_masked_equal(mda, expected)

    @pytest.mark.parametrize('offset', (0, 1))
    def test_trace(self, offset):
        mta = self.ma.trace(offset=offset)
        expected = Masked(self.a.trace(offset=offset),
                          self.mask_a.trace(offset=offset, dtype=bool))
        assert_masked_equal(mta, expected)

    def test_clip(self):
        maclip = self.ma.clip(self.b, self.c)
        expected = Masked(self.a.clip(self.b, self.c), self.mask_a)
        assert_masked_equal(maclip, expected)

    def test_clip_masked_min_max(self):
        maclip = self.ma.clip(self.mb, self.mc)
        # Need to be careful with min, max because of Longitude, which wraps.
dmax = np.maximum(np.maximum(self.a, self.b), self.c).max() dmin = np.minimum(np.minimum(self.a, self.b), self.c).min() expected = Masked(self.a.clip(self.mb.filled(dmin), self.mc.filled(dmax)), mask=self.mask_a) assert_masked_equal(maclip, expected) class TestMaskedQuantityMethods(TestMaskedArrayMethods, QuantitySetup): pass class TestMaskedLongitudeMethods(TestMaskedArrayMethods, LongitudeSetup): pass class TestMaskedArrayProductMethods(MaskedArraySetup): # These cannot work on Quantity, so done separately @pytest.mark.parametrize('axis', (0, 1, None)) def test_prod(self, axis): ma_sum = self.ma.prod(axis) expected_data = self.a.prod(axis) expected_mask = self.ma.mask.any(axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) @pytest.mark.parametrize('axis', (0, 1, None)) def test_cumprod(self, axis): ma_sum = self.ma.cumprod(axis) expected_data = self.a.cumprod(axis) mask = self.mask_a if axis is None: mask = mask.ravel() expected_mask = np.logical_or.accumulate(mask, axis=axis) assert_array_equal(ma_sum.unmasked, expected_data) assert_array_equal(ma_sum.mask, expected_mask) def test_masked_str_explicit(): sa = np.array([(1., 2.), (3., 4.)], dtype='f8,f8') msa = Masked(sa, [(False, True), (False, False)]) assert str(msa) == "[(1., ——) (3., 4.)]" assert str(msa[0]) == "(1., ——)" assert str(msa[1]) == "(3., 4.)" with np.printoptions(precision=3, floatmode='fixed'): assert str(msa) == "[(1.000, ———) (3.000, 4.000)]" def test_masked_repr_explicit(): # Use explicit endianness to ensure tests pass on all architectures sa = np.array([(1., 2.), (3., 4.)], dtype='>f8,>f8') msa = Masked(sa, [(False, True), (False, False)]) assert repr(msa) == ("MaskedNDArray([(1., ——), (3., 4.)], " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[0]) == ("MaskedNDArray((1., ——), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") assert repr(msa[1]) == ("MaskedNDArray((3., 4.), " "dtype=[('f0', '>f8'), ('f1', '>f8')])") def test_masked_repr_summary(): ma = Masked(np.arange(15.), mask=[True]+[False]*14) with np.printoptions(threshold=2): assert repr(ma) == ( "MaskedNDArray([———, 1., 2., ..., 12., 13., 14.])") def test_masked_repr_nodata(): assert repr(Masked([])) == "MaskedNDArray([], dtype=float64)" class TestMaskedArrayRepr(MaskedArraySetup): def test_array_str(self): # very blunt check they work at all. 
str(self.ma) str(self.mb) str(self.mc) str(self.msa) str(self.msb) def test_scalar_str(self): assert self.mb[0].shape == () str(self.mb[0]) assert self.msb[0].shape == () str(self.msb[0]) def test_array_repr(self): repr(self.ma) repr(self.mb) repr(self.mc) repr(self.msa) repr(self.msb) def test_scalar_repr(self): repr(self.mb[0]) repr(self.msb[0]) class TestMaskedQuantityRepr(TestMaskedArrayRepr, QuantitySetup): pass class TestMaskedRecarray(MaskedArraySetup): @classmethod def setup_class(self): super().setup_class() self.ra = self.sa.view(np.recarray) self.mra = Masked(self.ra, mask=self.mask_sa) def test_recarray_setup(self): assert isinstance(self.mra, Masked) assert isinstance(self.mra, np.recarray) assert np.all(self.mra.unmasked == self.ra) assert np.all(self.mra.mask == self.mask_sa) assert_array_equal(self.mra.view(np.ndarray), self.sa) assert isinstance(self.mra.a, Masked) assert_array_equal(self.mra.a.unmasked, self.sa['a']) assert_array_equal(self.mra.a.mask, self.mask_sa['a']) def test_recarray_setting(self): mra = self.mra.copy() mra.a = self.msa['b'] assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_getting(self, attr): mra_a = self.mra.field(attr) assert isinstance(mra_a, Masked) assert_array_equal(mra_a.unmasked, self.sa['a']) assert_array_equal(mra_a.mask, self.mask_sa['a']) @pytest.mark.parametrize('attr', [0, 'a']) def test_recarray_field_setting(self, attr): mra = self.mra.copy() mra.field(attr, self.msa['b']) assert_array_equal(mra.a.unmasked, self.msa['b'].unmasked) assert_array_equal(mra.a.mask, self.msa['b'].mask)
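# A minimal, runnable sketch (illustrative, not part of the repository file
# above) of the Masked API that these tests exercise -- .unmasked, .mask and
# .filled(); assumes an astropy version that ships astropy.utils.masked.
import numpy as np
from astropy.utils.masked import Masked

ma = Masked(np.array([1., 2., 3.]), mask=[False, True, False])
assert ma.unmasked[1] == 2.       # the underlying data survives masking
assert bool(ma.mask[1]) is True   # the mask is a plain boolean ndarray
assert ma.filled(0.)[1] == 0.     # masked entries are replaced by the fill value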
pllim/astropy
astropy/utils/masked/tests/test_masked.py
astropy/wcs/wcsapi/wrappers/sliced_wcs.py
# Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """MongoDBWriter""" import json from urllib.parse import urlparse import tensorflow as tf from tensorflow_io.python.ops import core_ops from tensorflow_io.python.experimental import serialization_ops class MongoDBWriter: """Write documents to mongoDB. The writer can be used to store documents in mongoDB while dealing with tensorflow based models and inference outputs. Without loss of generality, consider an ML model that is being used for inference. The outputs of inference can be modelled into a structured record by enriching the schema with additional information( for ex: metadata about input data and the semantics of the inference etc.) and can be stored in mongo collections for persistence or future analysis. To make a connection and write the documents to the mongo collections, the `tfio.experimental.mongodb.MongoDBWriter` API can be used. Example: >>> URI = "mongodb://mongoadmin:default_password@localhost:27017" >>> DATABASE = "tfiodb" >>> COLLECTION = "test" >>> writer = tfio.experimental.mongodb.MongoDBWriter( uri=URI, database=DATABASE, collection=COLLECTION ) >>> for i in range(1000): ... data = {"key{}".format(i): "value{}".format(i)} ... writer.write(data) """ def __init__(self, uri, database, collection): """Initialize the dataset with the following parameters Args: uri: A string, representing the uri of the mongo server or replicaset. To connect to a MongoDB server with username and password based authentication, the following uri pattern can be used. Ex: `"mongodb://mongoadmin:default_password@localhost:27017"`. Connecting to a replica set is much like connecting to a standalone MongoDB server. Simply specify the replica set name using the `?replicaSet=myreplset` URI option. Ex: "mongodb://host01:27017,host02:27017,host03:27017/?replicaSet=myreplset" Connection to a secure cluster via CA certs can be achieved by setting the respective TLS options to the URI. Ex: "mongodb://host01:27017/?tls=true&sslCertificateAuthorityFile=/opt/ca.pem" Additional information on writing uri's can be found here: - [libmongoc uri docs](http://mongoc.org/libmongoc/current/mongoc_uri_t.html) - [mongodb uri docs](https://docs.mongodb.com/manual/reference/connection-string/) database: A string, representing the database in the standalone MongoDB server or a replica set to connect to. collection: A string, representing the collection from which the documents have to be retrieved. 
""" self.uri = uri self.database = database self.collection = collection self.resource = core_ops.io_mongo_db_writable_init( uri=self.uri, database=self.database, collection=self.collection, ) def write(self, doc): """Insert a single json document""" core_ops.io_mongo_db_writable_write( resource=self.resource, record=json.dumps(doc) ) def _delete_many(self, doc): """Delete all matching documents""" core_ops.io_mongo_db_writable_delete_many( resource=self.resource, record=json.dumps(doc) )
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # ============================================================================== """test ffmpeg dataset""" import os import sys import pytest import tensorflow as tf import tensorflow_io as tfio @pytest.fixture(name="video_data", scope="module") def fixture_video_data(): """fixture_video_data""" path = "file://" + os.path.join( os.path.dirname(os.path.abspath(__file__)), "test_video", "small.mp4" ) # TODO: get raw value value = tf.zeros((166, 320, 560, 3), tf.uint8) return path, value @pytest.mark.parametrize( ("io_dataset_func"), [ pytest.param(lambda f: tfio.IODataset.graph(tf.uint8).from_ffmpeg(f, "v:0")), pytest.param(lambda f: tfio.IODataset.from_ffmpeg(f, "v:0")), ], ids=["from_ffmpeg", "from_ffmpeg(eager)"], ) def test_video_io_dataset(video_data, io_dataset_func): """test_video_io_dataset""" video_path, video_value = video_data video_dataset = io_dataset_func(video_path) i = 0 for value in video_dataset: assert video_value[i].shape == value.shape i += 1 assert i == 166 video_dataset = io_dataset_func(video_path).batch(2) i = 0 for value in video_dataset: assert video_value[i : i + 2].shape == value.shape i += 2 assert i == 166 @pytest.mark.parametrize( ("io_dataset_func"), [pytest.param(lambda f: tfio.IODataset.graph(tf.uint8).from_ffmpeg(f, "v:0"),)], ids=["from_ffmpeg"], ) def test_video_io_dataset_with_dataset(video_data, io_dataset_func): """test_video_io_dataset_with_dataset""" video_path, video_value = video_data filename_dataset = tf.data.Dataset.from_tensor_slices([video_path, video_path]) position_dataset = tf.data.Dataset.from_tensor_slices( [tf.constant(50, tf.int64), tf.constant(100, tf.int64)] ) dataset = tf.data.Dataset.zip((filename_dataset, position_dataset)) # Note: @tf.function is actually not needed, as tf.data.Dataset # will automatically wrap the `func` into a graph anyway. # The following is purely for explanation purposes. # Return: an embedded dataset (in an outer dataset) for position:position+100 @tf.function def func(filename, position): video_dataset = io_dataset_func(filename) return video_dataset.skip(position).take(10) dataset = dataset.map(func) item = 0 # Notice video_dataset in dataset: for video_dataset in dataset: position = 50 if item == 0 else 100 i = 0 for value in video_dataset: assert video_value[position + i].shape == value.shape i += 1 assert i == 10 item += 1 assert item == 2
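# The nested-dataset pattern used by test_video_io_dataset_with_dataset above,
# reduced to plain tensors (illustrative, not part of the repository file):
# tf.data traces `func` into a graph, so an element of the outer dataset can
# itself be a dataset. The range/skip/take values here are arbitrary.
import tensorflow as tf

positions = tf.data.Dataset.from_tensor_slices(
    [tf.constant(2, tf.int64), tf.constant(5, tf.int64)])

@tf.function
def func(position):
    # Inner dataset: elements position .. position+2 of a toy range.
    return tf.data.Dataset.range(10).skip(position).take(3)

for inner in positions.map(func):
    for value in inner:
        print(value.numpy())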
tensorflow/io
tests/test_video.py
tensorflow_io/python/experimental/mongodb_writer_ops.py
# -*- coding: utf-8 -*- import re import six import xmlrpclib from github import Github from urlparse import urlparse from fixtures.pytest_store import store from cfme.utils import classproperty, conf, version from cfme.utils.bz import Bugzilla from cfme.utils.log import logger class Blocker(object): """Base class for all blockers REQUIRED THING! Any subclass' constructors must accept kwargs and after POPping the values required for the blocker's operation, `call to ``super`` with ``**kwargs`` must be done! Failing to do this will render some of the functionality disabled ;). """ blocks = False kwargs = {} def __init__(self, **kwargs): self.forced_streams = kwargs.pop("forced_streams", []) self.__dict__["kwargs"] = kwargs @property def url(self): raise NotImplementedError('You need to implement .url') @classmethod def all_blocker_engines(cls): """Return mapping of name:class of all the blocker engines in this module. Having this as a separate function will later enable to scatter the engines across modules in case of extraction into a separate library. """ return { 'GH': GH, 'BZ': BZ, } @classmethod def parse(cls, blocker, **kwargs): """Create a blocker object from some representation""" if isinstance(blocker, cls): return blocker elif isinstance(blocker, six.string_types): if "#" in blocker: # Generic blocker engine, spec = blocker.split("#", 1) try: engine_class = cls.all_blocker_engines()[engine] except KeyError: raise ValueError( "{} is a wrong engine specification for blocker! ({} available)".format( engine, ", ".join(cls.all_blocker_engines().keys()))) return engine_class(spec, **kwargs) # EXTEND: If someone has other ideas, put them here raise ValueError("Could not parse blocker {}".format(blocker)) else: raise ValueError("Wrong specification of the blockers!") class GH(Blocker): DEFAULT_REPOSITORY = conf.env.get("github", {}).get("default_repo") _issue_cache = {} @classproperty def github(cls): if not hasattr(cls, "_github"): token = conf.env.get("github", {}).get("token") if token is not None: cls._github = Github(token) else: cls._github = Github() # Without auth max 60 req/hr return cls._github def __init__(self, description, **kwargs): super(GH, self).__init__(**kwargs) self._repo = None self.issue = None self.upstream_only = kwargs.get('upstream_only', True) self.since = kwargs.get('since') self.until = kwargs.get('until') if isinstance(description, (list, tuple)): try: self.repo, self.issue = description self.issue = int(self.issue) except ValueError: raise ValueError( "The GH issue specification must have 2 items and issue must be number") elif isinstance(description, int): if self.DEFAULT_REPOSITORY is None: raise ValueError("You must specify github/default_repo in env.yaml!") self.issue = description elif isinstance(description, basestring): try: owner, repo, issue_num = re.match(r"^([^/]+)/([^/:]+):([0-9]+)$", str(description).strip()).groups() except AttributeError: raise ValueError( "Could not parse '{}' as a proper GH issue anchor!".format(str(description))) else: self._repo = "{}/{}".format(owner, repo) self.issue = int(issue_num) else: raise ValueError("GH issue specified wrong") @property def data(self): identifier = "{}:{}".format(self.repo, self.issue) if identifier not in self._issue_cache: self._issue_cache[identifier] = self.github.get_repo(self.repo).get_issue(self.issue) return self._issue_cache[identifier] @property def blocks(self): if self.upstream_only and version.appliance_is_downstream(): return False if self.data.state == "closed": return False # Now 
let's check versions
        if self.since is None and self.until is None:
            # No version specifics
            return True
        elif self.since is not None and self.until is not None:
            # since inclusive, until exclusive
            return self.since <= version.current_version() < self.until
        elif self.since is not None:
            # Only since
            return version.current_version() >= self.since
        elif self.until is not None:
            # Only until
            return version.current_version() < self.until
        # All branches covered

    @property
    def repo(self):
        return self._repo or self.DEFAULT_REPOSITORY

    def __str__(self):
        return "GitHub Issue https://github.com/{}/issues/{}".format(self.repo, self.issue)

    @property
    def url(self):
        return "https://github.com/{}/issues/{}".format(self.repo, self.issue)


class BZ(Blocker):
    @classproperty
    def bugzilla(cls):
        if not hasattr(cls, "_bugzilla"):
            cls._bugzilla = Bugzilla.from_config()
        return cls._bugzilla

    def __init__(self, bug_id, **kwargs):
        self.ignore_bugs = kwargs.pop("ignore_bugs", [])
        super(BZ, self).__init__(**kwargs)
        self.bug_id = int(bug_id)

    @property
    def data(self):
        return self.bugzilla.resolve_blocker(
            self.bug_id, ignore_bugs=self.ignore_bugs, force_block_streams=self.forced_streams)

    @property
    def bugzilla_bug(self):
        if self.data is None:
            return None
        return self.data

    @property
    def blocks(self):
        try:
            bug = self.data
            if bug is None:
                return False
            result = False
            if bug.is_opened:
                result = True
            if bug.upstream_bug:
                if not version.appliance_is_downstream() and bug.can_test_on_upstream:
                    result = False
            if result is False and version.appliance_is_downstream():
                if bug.fixed_in is not None:
                    return version.current_version() < bug.fixed_in
            return result
        except xmlrpclib.Fault as e:
            code = e.faultCode
            s = e.faultString.strip().split("\n")[0]
            logger.error("Bugzilla threw a fault: %s/%s", code, s)
            logger.warning("Ignoring and taking the bug as non-blocking")
            store.terminalreporter.write(
                "Bugzilla made a booboo: {}/{}\n".format(code, s), bold=True)
            return False

    def get_bug_url(self):
        bz_url = urlparse(self.bugzilla.bugzilla.url)
        return "{}://{}/show_bug.cgi?id={}".format(bz_url.scheme, bz_url.netloc, self.bug_id)

    @property
    def url(self):
        return self.get_bug_url()

    def __str__(self):
        return "Bugzilla bug {} (or one of its copies)".format(self.get_bug_url())
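# Illustrative use (not part of the repository file above) of the "ENGINE#spec"
# syntax that Blocker.parse() accepts, matching all_blocker_engines(): "GH" for
# GitHub issues and "BZ" for Bugzilla bugs. The issue/bug numbers below are
# placeholders, and resolving .blocks needs GitHub/Bugzilla access.
gh_blocker = Blocker.parse("GH#owner/repo:1234")  # owner/repo:issue anchor
bz_blocker = Blocker.parse("BZ#1234567")          # plain Bugzilla bug id
for blocker in (gh_blocker, bz_blocker):
    if blocker.blocks:
        print("blocked by {}".format(blocker.url))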
import random
from traceback import format_exc
from collections import namedtuple

import pytest
import fauxfactory

from cfme.containers.provider import ContainersProvider
from cfme.containers.pod import Pod
from cfme.containers.service import Service
from cfme.containers.node import Node
from cfme.containers.replicator import Replicator
from cfme.containers.image import Image
from cfme.containers.project import Project
from cfme.containers.route import Route
from cfme.containers.template import Template
from cfme.utils import testgen
from cfme.utils.wait import wait_for
from cfme.utils.log import logger
from cfme.utils.blockers import BZ

pytestmark = [
    pytest.mark.usefixtures('setup_provider_modscope'),
    pytest.mark.tier(1)]

pytest_generate_tests = testgen.generate([ContainersProvider], scope='module')

TEST_OBJECTS = (Image, Pod, Service, Route, Template, Node, Replicator, Project)


def check_labels_in_ui(instance, name, expected_value):
    if hasattr(instance.summary, 'labels') and \
            hasattr(instance.summary.labels, name.lower()):
        return getattr(instance.summary.labels, name.lower()).text_value == str(expected_value)
    return False


@pytest.fixture(scope='module')
def random_labels(provider, appliance):
    # Creating random instance for each object in TEST_OBJECTS and create a random label for it.
    label_data = namedtuple('label_data', ['instance', 'label_name', 'label_value',
                                           'status_code', 'json_content'])
    data_collection = []  # Collected data in the form:
    # <instance>, <label_name>, <label_value>, <status_code>, <json_content>
    # Adding label to each object:
    for test_obj in TEST_OBJECTS:
        get_random_kwargs = {'count': 1, 'appliance': appliance}
        if test_obj is Image:
            get_random_kwargs['docker_only'] = True
        instance = test_obj.get_random_instances(provider, **get_random_kwargs).pop()
        label_key = fauxfactory.gen_alpha(1) + \
            fauxfactory.gen_alphanumeric(random.randrange(1, 62))
        value = fauxfactory.gen_alphanumeric(random.randrange(1, 63))
        try:
            status_code, json_content = test_obj.set_label(instance, label_key, value)
        except Exception:
            status_code, json_content = None, format_exc()
        data_collection.append(
            label_data(instance, label_key, value, status_code, json_content)
        )
    yield data_collection
    # In case that test_labels_remove is skipped we should remove the labels:
    for instance, label_key, _, status_code, _ in data_collection:
        if status_code and label_key in instance.get_labels():
            instance.remove_label(label_key)


@pytest.mark.polarion('CMP-10572')
def test_labels_create(provider, soft_assert, random_labels):

    provider.refresh_provider_relationships()
    # Verify that the labels appear in the UI:
    for instance, label_name, label_value, status_code, json_content in random_labels:
        if soft_assert(status_code, json_content):
            soft_assert(
                wait_for(
                    lambda: check_labels_in_ui(instance, label_name, label_value),
                    num_sec=120, delay=10,
                    fail_func=instance.summary.reload,
                    message='Verifying label ({} = {}) for {} {} exists'
                            .format(label_name, label_value,
                                    instance.__class__.__name__, instance.name),
                    silent_failure=True),
                'Could not find label ({} = {}) for {} {} in UI.'
.format(label_name, label_value, instance.__class__.__name__, instance.name) ) @pytest.mark.meta(blockers=[ BZ(1451832, forced_streams=['5.7', '5.8', 'upstream']), BZ(1472383, forced_streams=['5.7', '5.8', 'upstream']), BZ(1469666, forced_streams=['5.7', '5.8', 'upstream']), ]) @pytest.mark.polarion('CMP-10572') def test_labels_remove(provider, soft_assert, random_labels): # Removing the labels for instance, label_name, label_value, status_code, _ in random_labels: if status_code: instance.remove_label(label_name) else: logger.warning('Cannot remove label ({} = {}) for {} {}. (failed to add it previously)' .format(label_name, label_value, instance.__class__.__name__, instance.name)) provider.refresh_provider_relationships() # Verify that the labels removed successfully from UI: for instance, label_name, label_value, status_code, _ in random_labels: if status_code: soft_assert( wait_for( lambda: not check_labels_in_ui(instance, label_name, label_value), num_sec=180, delay=10, fail_func=instance.summary.reload, message='Verifying label ({} = {}) for {} {} removed' .format(label_name, label_value, instance.__class__.__name__, instance.name), silent_failure=True), 'Label ({} = {}) for {} {} found in UI (but should be removed).' .format(label_name, label_value, instance.__class__.__name__, instance.name) )
okolisny/integration_tests
cfme/tests/containers/test_labels.py
cfme/utils/blockers.py
import tempfile from os import listdir, mkdir, makedirs, path from shutil import copy, copyfile, rmtree from subprocess import check_output, CalledProcessError, STDOUT import sys from fauxfactory import gen_alphanumeric from cfme.utils import conf from cfme.utils.providers import providers_data from git import Repo from yaml import load, dump local_git_repo = "manageiq_ansible_module" yml_path = path.join(path.dirname(__file__), local_git_repo) yml_templates_path = path.join(path.dirname(__file__), 'ansible_conf') basic_script = "basic_script.yml" yml = ".yml" random_token = str(gen_alphanumeric(906)) random_miq_user = str(gen_alphanumeric(8)) pulled_repo_library_path = path.join(local_git_repo, 'library') remote_git_repo_url = "git://github.com/dkorn/manageiq-ansible-module.git" def create_tmp_directory(): global lib_path lib_path = tempfile.mkdtemp() lib_sub_path = 'ansible_conf' lib_sub_path_library = path.join(lib_sub_path, 'library') makedirs(path.join((lib_path), lib_sub_path_library)) global library_path_to_copy_to global basic_yml_path library_path_to_copy_to = path.join(lib_path, lib_sub_path_library) basic_yml_path = path.join(lib_path, lib_sub_path) def fetch_miq_ansible_module(): if path.isdir(local_git_repo): rmtree(local_git_repo) mkdir(local_git_repo) if path.isdir(library_path_to_copy_to): rmtree(library_path_to_copy_to) mkdir(library_path_to_copy_to) Repo.clone_from(remote_git_repo_url, local_git_repo) src_files = listdir(pulled_repo_library_path) for file_name in src_files: full_file_name = path.join(pulled_repo_library_path, file_name) if path.isfile(full_file_name): copy(full_file_name, library_path_to_copy_to) rmtree(local_git_repo) def get_values_for_providers_test(provider): return { 'name': provider.name, 'state': 'present', 'miq_url': config_formatter(), 'miq_username': conf.credentials['default'].username, 'miq_password': conf.credentials['default'].password, 'provider_api_hostname': providers_data[provider.name]['endpoints']['default'].hostname, 'provider_api_port': providers_data[provider.name]['endpoints']['default'].api_port, 'provider_api_auth_token': providers_data[provider.name]['endpoints']['default'].token, 'monitoring_hostname': providers_data[provider.name]['endpoints']['hawkular'].hostname, 'monitoring_port': providers_data[provider.name]['endpoints']['hawkular'].api_port } def get_values_for_users_test(): return { 'fullname': 'MIQUser', 'name': 'MIQU', 'password': 'smartvm', 'state': 'present', 'miq_url': config_formatter(), 'miq_username': conf.credentials['default'].username, 'miq_password': conf.credentials['default'].password, } def get_values_for_custom_attributes_test(provider): return { 'entity_type': 'provider', 'entity_name': conf.cfme_data.get('management_systems', {}) [provider.key].get('name', []), 'miq_url': config_formatter(), 'miq_username': conf.credentials['default'].username, 'miq_password': conf.credentials['default'].password, } def get_values_for_tags_test(provider): return { 'resource': 'provider', 'resource_name': provider.name, 'miq_url': config_formatter(), 'miq_username': conf.credentials['default'].username, 'miq_password': conf.credentials['default'].password, } def get_values_from_conf(provider, script_type): if script_type == 'providers': return get_values_for_providers_test(provider) if script_type == 'users': return get_values_for_users_test() if script_type == 'custom_attributes': return get_values_for_custom_attributes_test(provider) if script_type == 'tags': return get_values_for_tags_test(provider) # TODO Avoid 
reading files every time
def read_yml(script, value):
    with open(yml_path + script + yml, 'r') as f:
        doc = load(f)
    return doc[0]['tasks'][0]['manageiq_provider'][value]


def get_yml_value(script, value):
    with open(path.join(basic_yml_path, script) + yml, 'r') as f:
        doc = load(f)
    return doc[0]['tasks'][0]['manageiq_provider'][value]


def setup_basic_script(provider, script_type):
    script_path_source = path.join(yml_templates_path, script_type + "_" + basic_script)
    script_path = path.join(basic_yml_path, script_type + "_" + basic_script)
    copyfile(script_path_source, script_path)
    with open(script_path, 'r') as f:
        doc = load(f)
    values_dict = get_values_from_conf(provider, script_type)
    for key in values_dict:
        if script_type == 'providers':
            doc[0]['tasks'][0]['manageiq_provider'][key] = values_dict[key]
        elif script_type == 'users':
            doc[0]['tasks'][0]['manageiq_user'][key] = values_dict[key]
        elif script_type == 'custom_attributes':
            doc[0]['tasks'][0]['manageiq_custom_attributes'][key] = values_dict[key]
        elif script_type == 'tags':
            doc[0]['tasks'][0]['manageiq_tag_assignment'][key] = values_dict[key]
    with open(script_path, 'w') as f:
        f.write(dump(doc))


def open_yml(script, script_type):
    copyfile((path.join(basic_yml_path, script_type + "_" + basic_script)),
             path.join(basic_yml_path, script + yml))
    with open(path.join(basic_yml_path, script + yml), 'r') as f:
        return load(f)


def write_yml(script, doc):
    with open(path.join(basic_yml_path, script + yml), 'w') as f:
        f.write(dump(doc))


def setup_ansible_script(provider, script, script_type=None, values_to_update=None):
    # This function prepares the ansible scripts to work with the correct
    # appliance configs that will be received from Jenkins
    setup_basic_script(provider, script_type)
    doc = open_yml(script, script_type)
    if script == 'add_provider':
        write_yml(script, doc)
    if script == 'add_provider_ssl':
        doc[0]['tasks'][0]['manageiq_provider']['provider_verify_ssl'] = 'True'
        write_yml(script, doc)
    elif script == 'update_provider':
        for key in values_to_update:
            doc[0]['tasks'][0]['manageiq_provider'][key] = values_to_update[key]
        write_yml(script, doc)
    elif script == 'remove_provider':
        doc[0]['tasks'][0]['manageiq_provider']['state'] = 'absent'
        write_yml(script, doc)
    elif script == 'remove_non_existing_provider':
        doc[0]['tasks'][0]['manageiq_provider']['state'] = 'absent'
        doc[0]['tasks'][0]['manageiq_provider']['name'] = random_miq_user
        write_yml(script, doc)
    elif script == 'remove_provider_bad_user':
        doc[0]['tasks'][0]['manageiq_provider']['miq_username'] = random_miq_user
        write_yml(script, doc)
    elif script == 'add_provider_bad_token':
        doc[0]['tasks'][0]['manageiq_provider']['provider_api_auth_token'] = random_token
        write_yml(script, doc)
    elif script == 'add_provider_bad_user':
        doc[0]['tasks'][0]['manageiq_provider']['miq_username'] = random_miq_user
        write_yml(script, doc)
    elif script == 'update_non_existing_provider':
        doc[0]['tasks'][0]['manageiq_provider']['provider_api_hostname'] = random_miq_user
        write_yml(script, doc)
    elif script == 'update_provider_bad_user':
        for key in values_to_update:
            doc[0]['tasks'][0]['manageiq_provider'][key] = values_to_update[key]
        doc[0]['tasks'][0]['manageiq_provider']['miq_username'] = random_miq_user
        write_yml(script, doc)
    elif script == 'create_user':
        for key in values_to_update:
            doc[0]['tasks'][0]['manageiq_user'][key] = values_to_update[key]
        write_yml(script, doc)
    elif script == 'update_user':
        for key in values_to_update:
            doc[0]['tasks'][0]['manageiq_user'][key] = values_to_update[key]
        write_yml(script, doc)
    elif script ==
'create_user_bad_user_name': doc[0]['tasks'][0]['manageiq_user']['miq_username'] = random_miq_user for key in values_to_update: doc[0]['tasks'][0]['manageiq_user'][key] = values_to_update[key] write_yml(script, doc) elif script == 'delete_user': doc[0]['tasks'][0]['manageiq_user']['name'] = values_to_update doc[0]['tasks'][0]['manageiq_user']['state'] = 'absent' write_yml(script, doc) elif script == 'add_custom_attributes': count = 0 while count < len(values_to_update): for key in values_to_update: doc[0]['tasks'][0]['manageiq_custom_attributes']['custom_attributes'][count] = key count += 1 write_yml(script, doc) elif script == 'add_custom_attributes_bad_user': doc[0]['tasks'][0]['manageiq_custom_attributes']['miq_username'] = str(random_miq_user) write_yml(script, doc) elif script == 'remove_custom_attributes': count = 0 doc[0]['tasks'][0]['manageiq_custom_attributes']['state'] = 'absent' while count < len(values_to_update): for key in values_to_update: doc[0]['tasks'][0]['manageiq_custom_attributes']['custom_attributes'][count] = key count += 1 write_yml(script, doc) elif script == 'add_tags': count = 0 while count < len(values_to_update): for key in values_to_update: doc[0]['tasks'][0]['manageiq_tag_assignment']['tags'][count]['category'] = \ values_to_update[count]['category'] doc[0]['tasks'][0]['manageiq_tag_assignment']['tags'][count]['name'] = \ values_to_update[count]['name'] count += 1 doc[0]['tasks'][0]['manageiq_tag_assignment']['state'] = 'present' write_yml(script, doc) elif script == 'remove_tags': count = 0 while count < len(values_to_update): for key in values_to_update: doc[0]['tasks'][0]['manageiq_tag_assignment']['tags'][count]['category'] = \ values_to_update[count]['category'] doc[0]['tasks'][0]['manageiq_tag_assignment']['tags'][count]['name'] = \ values_to_update[count]['name'] count += 1 doc[0]['tasks'][0]['manageiq_tag_assignment']['state'] = 'absent' write_yml(script, doc) def run_ansible(script): ansible_playbook_cmd = "ansible-playbook -e ansible_python_interpreter=" interpreter_path = sys.executable script_path = path.join(basic_yml_path, script + ".yml") cmd = '{}{} {}'.format(ansible_playbook_cmd, interpreter_path, script_path) return run_cmd(cmd) def run_cmd(cmd): try: response = check_output(cmd, shell=True, stderr=STDOUT) except CalledProcessError as exc: print("Status : FAIL", exc.returncode, exc.output) return exc.output else: print("Output: \n{}\n".format(response)) # TODO For further usage with reply statuses test. Not being used at the moment def reply_status(reply): ok_status = reply['stats']['localhost']['ok'] changed_status = reply['stats']['localhost']['changed'] failures_status = reply['stats']['localhost']['failures'] skipped_status = reply['stats']['localhost']['skipped'] message_status = reply['plays'][0]['tasks'][2]['hosts']['localhost']['result']['msg'] if not ok_status == '0': ok_status = 'OK' else: ok_status = 'Failed' if changed_status: return 'Changed', message_status, ok_status elif skipped_status: return 'Skipped', message_status, ok_status elif failures_status: return 'Failed', message_status, ok_status else: return 'No Change', message_status, ok_status def config_formatter(): if "https://" in conf.env.get("base_url", None): return conf.env.get("base_url", None) else: return "https://" + conf.env.get("base_url", None) def remove_tmp_files(): rmtree(lib_path, ignore_errors=True)
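# A hypothetical driver (not part of the repository file above) showing the
# intended call order of the helpers: build the temporary workspace, fetch the
# upstream module library, render one of the *_basic_script.yml templates, run
# it, and clean up. `provider` stands in for a provider object from the harness.
def run_add_provider(provider):
    create_tmp_directory()      # sets the lib_path / basic_yml_path globals
    fetch_miq_ansible_module()  # clones the library into the workspace
    try:
        setup_ansible_script(provider, 'add_provider', script_type='providers')
        return run_ansible('add_provider')
    finally:
        remove_tmp_files()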
import random
from traceback import format_exc
from collections import namedtuple

import pytest
import fauxfactory

from cfme.containers.provider import ContainersProvider
from cfme.containers.pod import Pod
from cfme.containers.service import Service
from cfme.containers.node import Node
from cfme.containers.replicator import Replicator
from cfme.containers.image import Image
from cfme.containers.project import Project
from cfme.containers.route import Route
from cfme.containers.template import Template
from cfme.utils import testgen
from cfme.utils.wait import wait_for
from cfme.utils.log import logger
from cfme.utils.blockers import BZ

pytestmark = [
    pytest.mark.usefixtures('setup_provider_modscope'),
    pytest.mark.tier(1)]

pytest_generate_tests = testgen.generate([ContainersProvider], scope='module')

TEST_OBJECTS = (Image, Pod, Service, Route, Template, Node, Replicator, Project)


def check_labels_in_ui(instance, name, expected_value):
    if hasattr(instance.summary, 'labels') and \
            hasattr(instance.summary.labels, name.lower()):
        return getattr(instance.summary.labels, name.lower()).text_value == str(expected_value)
    return False


@pytest.fixture(scope='module')
def random_labels(provider, appliance):
    # Creating random instance for each object in TEST_OBJECTS and create a random label for it.
    label_data = namedtuple('label_data', ['instance', 'label_name', 'label_value',
                                           'status_code', 'json_content'])
    data_collection = []  # Collected data in the form:
    # <instance>, <label_name>, <label_value>, <status_code>, <json_content>
    # Adding label to each object:
    for test_obj in TEST_OBJECTS:
        get_random_kwargs = {'count': 1, 'appliance': appliance}
        if test_obj is Image:
            get_random_kwargs['docker_only'] = True
        instance = test_obj.get_random_instances(provider, **get_random_kwargs).pop()
        label_key = fauxfactory.gen_alpha(1) + \
            fauxfactory.gen_alphanumeric(random.randrange(1, 62))
        value = fauxfactory.gen_alphanumeric(random.randrange(1, 63))
        try:
            status_code, json_content = test_obj.set_label(instance, label_key, value)
        except Exception:
            status_code, json_content = None, format_exc()
        data_collection.append(
            label_data(instance, label_key, value, status_code, json_content)
        )
    yield data_collection
    # In case that test_labels_remove is skipped we should remove the labels:
    for instance, label_key, _, status_code, _ in data_collection:
        if status_code and label_key in instance.get_labels():
            instance.remove_label(label_key)


@pytest.mark.polarion('CMP-10572')
def test_labels_create(provider, soft_assert, random_labels):

    provider.refresh_provider_relationships()
    # Verify that the labels appear in the UI:
    for instance, label_name, label_value, status_code, json_content in random_labels:
        if soft_assert(status_code, json_content):
            soft_assert(
                wait_for(
                    lambda: check_labels_in_ui(instance, label_name, label_value),
                    num_sec=120, delay=10,
                    fail_func=instance.summary.reload,
                    message='Verifying label ({} = {}) for {} {} exists'
                            .format(label_name, label_value,
                                    instance.__class__.__name__, instance.name),
                    silent_failure=True),
                'Could not find label ({} = {}) for {} {} in UI.'
.format(label_name, label_value, instance.__class__.__name__, instance.name) ) @pytest.mark.meta(blockers=[ BZ(1451832, forced_streams=['5.7', '5.8', 'upstream']), BZ(1472383, forced_streams=['5.7', '5.8', 'upstream']), BZ(1469666, forced_streams=['5.7', '5.8', 'upstream']), ]) @pytest.mark.polarion('CMP-10572') def test_labels_remove(provider, soft_assert, random_labels): # Removing the labels for instance, label_name, label_value, status_code, _ in random_labels: if status_code: instance.remove_label(label_name) else: logger.warning('Cannot remove label ({} = {}) for {} {}. (failed to add it previously)' .format(label_name, label_value, instance.__class__.__name__, instance.name)) provider.refresh_provider_relationships() # Verify that the labels removed successfully from UI: for instance, label_name, label_value, status_code, _ in random_labels: if status_code: soft_assert( wait_for( lambda: not check_labels_in_ui(instance, label_name, label_value), num_sec=180, delay=10, fail_func=instance.summary.reload, message='Verifying label ({} = {}) for {} {} removed' .format(label_name, label_value, instance.__class__.__name__, instance.name), silent_failure=True), 'Label ({} = {}) for {} {} found in UI (but should be removed).' .format(label_name, label_value, instance.__class__.__name__, instance.name) )
okolisny/integration_tests
cfme/tests/containers/test_labels.py
cfme/utils/ansible.py
# Copyright (c) 2016, Xilinx, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import math from . import Pmod from . import PMOD_GROVE_G3 from . import PMOD_GROVE_G4 __author__ = "Yun Rock Qu" __copyright__ = "Copyright 2016, Xilinx" __email__ = "pynq_support@xilinx.com" PMOD_GROVE_IMU_PROGRAM = "pmod_grove_imu.bin" CONFIG_IOP_SWITCH = 0x1 GET_ACCL_DATA = 0x3 GET_GYRO_DATA = 0x5 GET_COMPASS_DATA = 0x7 GET_TEMPERATURE = 0xB GET_PRESSURE = 0xD RESET = 0xF def _reg2float(reg): """Converts 32-bit register value to floats in Python. Parameters ---------- reg: int A 32-bit register value read from the mailbox. Returns ------- float A float number translated from the register value. """ if reg == 0: return 0.0 sign = (reg & 0x80000000) >> 31 & 0x01 exp = ((reg & 0x7f800000) >> 23) - 127 if exp == 0: man = (reg & 0x007fffff) / pow(2, 23) else: man = 1 + (reg & 0x007fffff) / pow(2, 23) result = pow(2, exp) * man * ((sign * -2) + 1) return float("{0:.2f}".format(result)) def _reg2int(reg): """Converts 32-bit register value to signed integer in Python. Parameters ---------- reg: int A 32-bit register value read from the mailbox. Returns ------- int A signed integer translated from the register value. """ result = -(reg >> 31 & 0x1) * (1 << 31) for i in range(31): result += (reg >> i & 0x1) * (1 << i) return result class Grove_IMU(object): """This class controls the Grove IIC IMU. Grove IMU 10DOF is a combination of grove IMU 9DOF (MPU9250) and grove barometer sensor (BMP180). MPU-9250 is a 9-axis motion tracking device that combines a 3-axis gyroscope, 3-axis accelerometer, 3-axis magnetometer and a Digital Motion Processor (DMP). BMP180 is a high precision, low power digital pressure sensor. Hardware version: v1.1. Attributes ---------- microblaze : Pmod Microblaze processor instance used by this module. """ def __init__(self, mb_info, gr_pin): """Return a new instance of an Grove IMU object. Parameters ---------- mb_info : dict A dictionary storing Microblaze information, such as the IP name and the reset name. gr_pin: list A group of pins on pmod-grove adapter. 
""" if gr_pin not in [PMOD_GROVE_G3, PMOD_GROVE_G4]: raise ValueError("Group number can only be G3 - G4.") self.microblaze = Pmod(mb_info, PMOD_GROVE_IMU_PROGRAM) self.microblaze.write_mailbox(0, gr_pin) self.microblaze.write_blocking_command(CONFIG_IOP_SWITCH) self.reset() def reset(self): """Reset all the sensors on the grove IMU. Returns ------- None """ self.microblaze.write_blocking_command(RESET) def get_accl(self): """Get the data from the accelerometer. Returns ------- list A list of the acceleration data along X-axis, Y-axis, and Z-axis. """ self.microblaze.write_blocking_command(GET_ACCL_DATA) data = self.microblaze.read_mailbox(0, 3) [ax, ay, az] = [_reg2int(i) for i in data] return [float("{0:.2f}".format(ax/16384)), float("{0:.2f}".format(ay/16384)), float("{0:.2f}".format(az/16384))] def get_gyro(self): """Get the data from the gyroscope. Returns ------- list A list of the gyro data along X-axis, Y-axis, and Z-axis. """ self.microblaze.write_blocking_command(GET_GYRO_DATA) data = self.microblaze.read_mailbox(0, 3) [gx, gy, gz] = [_reg2int(i) for i in data] return [float("{0:.2f}".format(gx*250/32768)), float("{0:.2f}".format(gy*250/32768)), float("{0:.2f}".format(gz*250/32768))] def get_compass(self): """Get the data from the magnetometer. Returns ------- list A list of the compass data along X-axis, Y-axis, and Z-axis. """ self.microblaze.write_blocking_command(GET_COMPASS_DATA) data = self.microblaze.read_mailbox(0, 3) [mx, my, mz] = [_reg2int(i) for i in data] return [float("{0:.2f}".format(mx*1200/4096)), float("{0:.2f}".format(my*1200/4096)), float("{0:.2f}".format(mz*1200/4096))] def get_heading(self): """Get the value of the heading. Returns ------- float The angle deviated from the X-axis, toward the positive Y-axis. """ [mx, my, _] = self.get_compass() heading = 180 * math.atan2(my, mx) / math.pi if heading < 0: heading += 360 return float("{0:.2f}".format(heading)) def get_tilt_heading(self): """Get the value of the tilt heading. Returns ------- float The tilt heading value. """ [ax, ay, _] = self.get_accl() [mx, my, mz] = self.get_compass() try: pitch = math.asin(-ax) roll = math.asin(ay / math.cos(pitch)) except ZeroDivisionError: raise RuntimeError("Value out of range or device not connected.") xh = mx * math.cos(pitch) + mz * math.sin(pitch) yh = mx * math.sin(roll) * math.sin(pitch) + \ my * math.cos(roll) - mz * math.sin(roll) * math.cos(pitch) _ = -mx * math.cos(roll) * math.sin(pitch) + \ my * math.sin(roll) + mz * math.cos(roll) * math.cos(pitch) tilt_heading = 180 * math.atan2(yh, xh) / math.pi if yh < 0: tilt_heading += 360 return float("{0:.2f}".format(tilt_heading)) def get_temperature(self): """Get the current temperature in degree C. Returns ------- float The temperature value. """ self.microblaze.write_blocking_command(GET_TEMPERATURE) value = self.microblaze.read_mailbox(0) return _reg2float(value) def get_pressure(self): """Get the current pressure in Pa. Returns ------- float The pressure value. """ self.microblaze.write_blocking_command(GET_PRESSURE) value = self.microblaze.read_mailbox(0) return _reg2float(value) def get_atm(self): """Get the current pressure in relative atmosphere. Returns ------- float The related atmosphere. """ return float("{0:.2f}".format(self.get_pressure()/101325)) def get_altitude(self): """Get the current altitude. Returns ------- float The altitude value. """ pressure = self.get_pressure() a = pressure/101325 b = 1/5.255 c = 1-pow(a, b) altitude = 44300 * c return float("{0:.2f}".format(altitude))
# Copyright (c) 2016, Xilinx, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from random import randint from time import sleep import pytest from pynq import Overlay from pynq.lib.pmod import Pmod_Cable from pynq.lib.pmod import PMODA from pynq.lib.pmod import PMODB from pynq.tests.util import user_answer_yes from pynq.tests.util import get_interface_id __author__ = "Yun Rock Qu" __copyright__ = "Copyright 2016, Xilinx" __email__ = "pynq_support@xilinx.com" try: _ = Overlay('base.bit', download=False) flag0 = True except IOError: flag0 = False flag1 = user_answer_yes("\nTwo Pmod interfaces connected by a cable?") if flag1: send_id = eval(get_interface_id('sender', options=['PMODA', 'PMODB'])) recv_id = eval(get_interface_id('receiver', options=['PMODA', 'PMODB'])) flag = flag0 and flag1 @pytest.mark.skipif(not flag, reason="need Pmod cable attached to the base overlay") def test_pmod_cable(): """Tests for the Pmod cable. The following tests are involved here: 1. Test the Pmod cable type. 2. Test for right shifting the bit "1". The sender will send patterns with the bit "1" right shifted each time. 3. Test for right shifting the bit "0". The sender will send patterns with the bit "0" right shifted each time. 4. Test for left shifting the bit "1". The sender will send patterns with the bit "1" left shifted each time. 5. Test for left shifting the bit "0". The sender will send patterns with the bit "0" left shifted each time. 6. Test software-generated pseudo-random numbers. Random 0/1's are generated at each bit location. 8 bits (1 bit per pin) are sent out in every iteration. This test may take a few seconds to finish. Note ---- The cable type can only be 'straight' or 'loopback'. Default cable type is straight. The Pmod IO layout is: Upper row: {vdd,gnd,3,2,1,0}. Lower row: {vdd,gnd,7,6,5,4}. """ ol = Overlay('base.bit') print('\nTesting Pmod IO cable...') assert not send_id == recv_id, \ "The sender port cannot be the receiver port." 
tx = [Pmod_Cable(send_id, k, 'out', 'loopback') for k in range(8)] rx = [Pmod_Cable(recv_id, k, 'in', 'loopback') for k in range(8)] tx[0].write(0) tx[3].write(0) tx[4].write(1) tx[7].write(1) if [rx[0].read(), rx[3].read(), rx[4].read(), rx[7].read()] == \ [0, 0, 1, 1]: # Using a loop-back cable for i in range(8): rx[i].set_cable('loopback') elif [rx[0].read(), rx[3].read(), rx[4].read(), rx[7].read()] == \ [1, 1, 0, 0]: # Using a straight cable for i in range(8): rx[i].set_cable('straight') else: raise AssertionError("Cable unrecognizable.") print('Generating tests for right shifting a \"1\"...') send_data = [1, 0, 0, 0, 0, 0, 0, 0] for i in range(8): if i != 0: send_data = send_data[-1:]+send_data[:-1] recv_data = [0, 0, 0, 0, 0, 0, 0, 0] tx[i].write(send_data[i]) sleep(0.001) recv_data[i] = rx[i].read() assert send_data == recv_data,\ 'Sent {} != received {} at Pin {}.'.format(send_data, recv_data, i) print('Generating tests for right shifting a \"0\"...') send_data = [0, 1, 1, 1, 1, 1, 1, 1] for i in range(8): if i != 0: send_data = send_data[-1:]+send_data[:-1] recv_data = [1, 1, 1, 1, 1, 1, 1, 1] tx[i].write(send_data[i]) sleep(0.001) recv_data[i] = rx[i].read() assert send_data == recv_data,\ 'Sent {} != received {} at Pin {}.'.format(send_data, recv_data, i) print('Generating tests for left shifting a \"1\"...') send_data = [0, 0, 0, 0, 0, 0, 0, 1] for i in range(8): if i != 0: send_data = send_data[1:]+send_data[:1] recv_data = [0, 0, 0, 0, 0, 0, 0, 0] tx[7-i].write(send_data[7-i]) sleep(0.001) recv_data[7-i] = rx[7-i].read() assert send_data == recv_data,\ 'Sent {} != received {} at Pin {}' \ .format(send_data, recv_data, 7-i) print('Generating tests for left shifting a \"0\"...') send_data = [1, 1, 1, 1, 1, 1, 1, 0] for i in range(8): if i != 0: send_data = send_data[1:]+send_data[:1] recv_data = [1, 1, 1, 1, 1, 1, 1, 1] tx[7-i].write(send_data[7-i]) sleep(0.001) recv_data[7-i] = rx[7-i].read() assert send_data == recv_data,\ 'Sent {} != received {} at Pin {}' \ .format(send_data, recv_data, 7-i) print('Generating 100 random tests...') for _ in range(100): send_data = [0, 0, 0, 0, 0, 0, 0, 0] recv_data = [1, 1, 1, 1, 1, 1, 1, 1] for j in range(8): send_data[j] = randint(0, 1) tx[j].write(send_data[j]) sleep(0.001) recv_data[j] = rx[j].read() assert send_data == recv_data,\ 'Sent {} != received {}.'.format(send_data, recv_data) ol.reset
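The cable-type detection at the top of this test hinges on the pin layout noted in the docstring: a loopback cable wires pin k to pin k, while a straight cable swaps the upper and lower rows, wiring pin k to pin (k + 4) % 8. That mapping is inferred from the expected readings in the test rather than from the Pmod_Cable source, so the sketch below is illustrative:

def remap(pin, cable):
    """Receiver pin index -> sender pin it is physically wired to."""
    if cable == 'loopback':
        return pin
    if cable == 'straight':
        return (pin + 4) % 8
    raise ValueError("cable must be 'loopback' or 'straight'")

# Detection pattern from the test: sender drives pins 0 and 3 low, 4 and 7 high.
sent = {0: 0, 3: 0, 4: 1, 7: 1}
print([sent[remap(p, 'loopback')] for p in (0, 3, 4, 7)])  # [0, 0, 1, 1]
print([sent[remap(p, 'straight')] for p in (0, 3, 4, 7)])  # [1, 1, 0, 0]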
yunqu/PYNQ
pynq/lib/pmod/tests/test_pmod_cable.py
pynq/lib/pmod/pmod_grove_imu.py
# Copyright (c) 2016, NECST Laboratory, Politecnico di Milano # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from . import Arduino from . import ARDUINO_GROVE_G1 from . import ARDUINO_GROVE_G2 from . import ARDUINO_GROVE_G3 from . import ARDUINO_GROVE_G4 from . import ARDUINO_GROVE_G5 from . import ARDUINO_GROVE_G6 from . import ARDUINO_GROVE_G7 __author__ = "Marco Rabozzi, Giuseppe Natale" __copyright__ = "Copyright 2016, NECST Laboratory, Politecnico di Milano" ARDUINO_GROVE_EAR_HR_PROGRAM = "arduino_grove_ear_hr.bin" CONFIG_IOP_SWITCH = 0x1 class Grove_EarHR(object): """This class controls the Grove ear clip heart rate sensor. Sensor model: MED03212P. Attributes ---------- microblaze : Arduino Microblaze processor instance used by this module. """ def __init__(self, mb_info, gr_pin): """Return a new instance of an Grove_EarHR object. Parameters ---------- mb_info : dict A dictionary storing Microblaze information, such as the IP name and the reset name. gr_pin: list A group of pins on arduino-grove adapter. """ if gr_pin not in [ARDUINO_GROVE_G1, ARDUINO_GROVE_G2, ARDUINO_GROVE_G3, ARDUINO_GROVE_G4, ARDUINO_GROVE_G5, ARDUINO_GROVE_G6, ARDUINO_GROVE_G7]: raise ValueError("Group number can only be G1 - G7.") self.microblaze = Arduino(mb_info, ARDUINO_GROVE_EAR_HR_PROGRAM) self.microblaze.write_mailbox(0, gr_pin[0]) self.microblaze.write_blocking_command(CONFIG_IOP_SWITCH) def read(self): """Read the heart rate from the sensor. Returns ------- float The heart rate as beats per minute """ beats, interval_ms = self.read_raw() if 0 < interval_ms < 2500: rate = 60000.0 / interval_ms else: raise RuntimeError("Value out of range or device not connected.") return rate def read_raw(self): """Read the number of heart beats. Read the number of beats since the sensor initialization; also read the time elapsed in ms between the latest two heart beats. Returns ------- tuple Number of heart beats and the time elapsed between 2 latest beats. """ beats = self.microblaze.read_mailbox(0x4) interval_ms = self.microblaze.read_mailbox(0x8 + (beats % 4) * 4) return beats, interval_ms
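A minimal usage sketch for Grove_EarHR, assuming the base overlay and the ear clip on Grove connector G1 of the Arduino shield; the import path and the base.ARDUINO attribute are assumptions based on the surrounding package layout:

from pynq.overlays.base import BaseOverlay
from pynq.lib.arduino import Grove_EarHR, ARDUINO_GROVE_G1

base = BaseOverlay("base.bit")
hr_sensor = Grove_EarHR(base.ARDUINO, ARDUINO_GROVE_G1)

# read() converts the beat interval to bpm (e.g. 800 ms -> 75 bpm) and raises
# RuntimeError until a plausible interval in (0, 2500) ms has been observed.
print("heart rate (bpm):", hr_sensor.read())

beats, interval_ms = hr_sensor.read_raw()
print(beats, "beats so far,", interval_ms, "ms between the last two")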
yunqu/PYNQ
pynq/lib/pmod/tests/test_pmod_cable.py
pynq/lib/arduino/arduino_grove_ear_hr.py
"""Config flow for Mobile App.""" import uuid from homeassistant import config_entries from homeassistant.components import person from homeassistant.helpers import entity_registry from .const import ATTR_APP_ID, ATTR_DEVICE_ID, ATTR_DEVICE_NAME, CONF_USER_ID, DOMAIN @config_entries.HANDLERS.register(DOMAIN) class MobileAppFlowHandler(config_entries.ConfigFlow): """Handle a Mobile App config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" placeholders = { "apps_url": "https://www.home-assistant.io/integrations/mobile_app/#apps" } return self.async_abort( reason="install_app", description_placeholders=placeholders ) async def async_step_registration(self, user_input=None): """Handle a flow initialized during registration.""" if ATTR_DEVICE_ID in user_input: # Unique ID is combi of app + device ID. await self.async_set_unique_id( f"{user_input[ATTR_APP_ID]}-{user_input[ATTR_DEVICE_ID]}" ) else: user_input[ATTR_DEVICE_ID] = str(uuid.uuid4()).replace("-", "") # Register device tracker entity and add to person registering app ent_reg = await entity_registry.async_get_registry(self.hass) devt_entry = ent_reg.async_get_or_create( "device_tracker", DOMAIN, user_input[ATTR_DEVICE_ID], suggested_object_id=user_input[ATTR_DEVICE_NAME], ) await person.async_add_user_device_tracker( self.hass, user_input[CONF_USER_ID], devt_entry.entity_id ) return self.async_create_entry( title=user_input[ATTR_DEVICE_NAME], data=user_input )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/mobile_app/config_flow.py
"""Config flow for Elexa Guardian integration.""" from aioguardian import Client from aioguardian.errors import GuardianError import voluptuous as vol from homeassistant import config_entries, core from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT from homeassistant.core import callback from .const import CONF_UID, DOMAIN, LOGGER # pylint:disable=unused-import DATA_SCHEMA = vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PORT, default=7777): int} ) UNIQUE_ID = "guardian_{0}" @callback def async_get_pin_from_discovery_hostname(hostname): """Get the device's 4-digit PIN from its zeroconf-discovered hostname.""" return hostname.split(".")[0].split("-")[1] @callback def async_get_pin_from_uid(uid): """Get the device's 4-digit PIN from its UID.""" return uid[-4:] async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ async with Client(data[CONF_IP_ADDRESS]) as client: ping_data = await client.system.ping() return { CONF_UID: ping_data["data"]["uid"], } class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Elexa Guardian.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize.""" self.discovery_info = {} async def _async_set_unique_id(self, pin): """Set the config entry's unique ID (based on the device's 4-digit PIN).""" await self.async_set_unique_id(UNIQUE_ID.format(pin)) self._abort_if_unique_id_configured() async def async_step_user(self, user_input=None): """Handle configuration via the UI.""" if user_input is None: return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={} ) try: info = await validate_input(self.hass, user_input) except GuardianError as err: LOGGER.error("Error while connecting to unit: %s", err) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors={CONF_IP_ADDRESS: "cannot_connect"}, ) pin = async_get_pin_from_uid(info[CONF_UID]) await self._async_set_unique_id(pin) return self.async_create_entry( title=info[CONF_UID], data={CONF_UID: info["uid"], **user_input} ) async def async_step_zeroconf(self, discovery_info): """Handle the configuration via zeroconf.""" if discovery_info is None: return self.async_abort(reason="connection_error") pin = async_get_pin_from_discovery_hostname(discovery_info["hostname"]) await self._async_set_unique_id(pin) # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 self.context[CONF_IP_ADDRESS] = discovery_info["host"] if any( discovery_info["host"] == flow["context"][CONF_IP_ADDRESS] for flow in self._async_in_progress() ): return self.async_abort(reason="already_in_progress") self.discovery_info = { CONF_IP_ADDRESS: discovery_info["host"], CONF_PORT: discovery_info["port"], } return await self.async_step_zeroconf_confirm() async def async_step_zeroconf_confirm(self, user_input=None): """Finish the configuration via zeroconf.""" if user_input is None: return self.async_show_form(step_id="zeroconf_confirm") return await self.async_step_user(self.discovery_info)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/guardian/config_flow.py
"""Config flow for Monoprice 6-Zone Amplifier integration.""" import logging from pymonoprice import get_async_monoprice from serial import SerialException import voluptuous as vol from homeassistant import config_entries, core, exceptions from homeassistant.const import CONF_PORT from .const import ( CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, CONF_SOURCES, ) from .const import DOMAIN # pylint:disable=unused-import _LOGGER = logging.getLogger(__name__) SOURCES = [ CONF_SOURCE_1, CONF_SOURCE_2, CONF_SOURCE_3, CONF_SOURCE_4, CONF_SOURCE_5, CONF_SOURCE_6, ] OPTIONS_FOR_DATA = {vol.Optional(source): str for source in SOURCES} DATA_SCHEMA = vol.Schema({vol.Required(CONF_PORT): str, **OPTIONS_FOR_DATA}) @core.callback def _sources_from_config(data): sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } return { index: name.strip() for index, name in sources_config.items() if (name is not None and name.strip() != "") } async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ try: await get_async_monoprice(data[CONF_PORT], hass.loop) except SerialException: _LOGGER.error("Error connecting to Monoprice controller") raise CannotConnect sources = _sources_from_config(data) # Return info that you want to store in the config entry. return {CONF_PORT: data[CONF_PORT], CONF_SOURCES: sources} class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Monoprice 6-Zone Amplifier.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: try: info = await validate_input(self.hass, user_input) return self.async_create_entry(title=user_input[CONF_PORT], data=info) except CannotConnect: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) @staticmethod @core.callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return MonopriceOptionsFlowHandler(config_entry) @core.callback def _key_for_source(index, source, previous_sources): if str(index) in previous_sources: key = vol.Optional( source, description={"suggested_value": previous_sources[str(index)]} ) else: key = vol.Optional(source) return key class MonopriceOptionsFlowHandler(config_entries.OptionsFlow): """Handle a Monoprice options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry @core.callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: previous = self.config_entry.options[CONF_SOURCES] else: previous = self.config_entry.data[CONF_SOURCES] return previous async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry( title="", data={CONF_SOURCES: _sources_from_config(user_input)} ) previous_sources = self._previous_sources() options = { _key_for_source(idx + 1, source, previous_sources): str for idx, source in enumerate(SOURCES) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options),) class CannotConnect(exceptions.HomeAssistantError): """Error to indicate we cannot connect."""
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/monoprice/config_flow.py
"""Support for Gogogate2 garage Doors.""" import logging from typing import Callable, List, Optional from gogogate2_api.common import Door, DoorStatus, get_configured_doors, get_door_by_id import voluptuous as vol from homeassistant.components.cover import ( DEVICE_CLASS_GARAGE, SUPPORT_CLOSE, SUPPORT_OPEN, CoverEntity, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .common import ( GogoGateDataUpdateCoordinator, cover_unique_id, get_data_update_coordinator, ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) COVER_SCHEMA = vol.Schema( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, } ) async def async_setup_platform( hass: HomeAssistant, config: dict, add_entities: Callable, discovery_info=None ) -> None: """Convert old style file configs to new style configs.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) ) async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable[[List[Entity], Optional[bool]], None], ) -> None: """Set up the config entry.""" data_update_coordinator = get_data_update_coordinator(hass, config_entry) async_add_entities( [ Gogogate2Cover(config_entry, data_update_coordinator, door) for door in get_configured_doors(data_update_coordinator.data) ] ) class Gogogate2Cover(CoverEntity): """Cover entity for goggate2.""" def __init__( self, config_entry: ConfigEntry, data_update_coordinator: GogoGateDataUpdateCoordinator, door: Door, ) -> None: """Initialize the object.""" self._config_entry = config_entry self._data_update_coordinator = data_update_coordinator self._door = door self._api = data_update_coordinator.api self._unique_id = cover_unique_id(config_entry, door) self._is_available = True @property def available(self) -> bool: """Return True if entity is available.""" return self._is_available @property def should_poll(self) -> bool: """Return False as the data manager handles dispatching data.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the door.""" return self._door.name @property def is_closed(self): """Return true if cover is closed, else False.""" if self._door.status == DoorStatus.OPENED: return False if self._door.status == DoorStatus.CLOSED: return True return None @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_GARAGE @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE async def async_open_cover(self, **kwargs): """Open the door.""" await self.hass.async_add_executor_job(self._api.open_door, self._door.door_id) async def async_close_cover(self, **kwargs): """Close the door.""" await self.hass.async_add_executor_job(self._api.close_door, self._door.door_id) @property def state_attributes(self): """Return the state attributes.""" attrs = super().state_attributes attrs["door_id"] = self._door.door_id return attrs @callback def async_on_data_updated(self) -> None: """Receive data from data dispatcher.""" if not 
self._data_update_coordinator.last_update_success: self._is_available = False self.async_write_ha_state() return door = get_door_by_id(self._door.door_id, self._data_update_coordinator.data) # Set the state. self._door = door self._is_available = True self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Register update dispatcher.""" self.async_on_remove( self._data_update_coordinator.async_add_listener(self.async_on_data_updated) )
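The cover above never polls; it relies on the coordinator's listener dispatch. As a minimal standalone sketch of that pattern (a hypothetical FakeCoordinator, not the integration's GogoGateDataUpdateCoordinator), async_add_listener registers a callback and returns an unsubscribe callable, which async_on_remove later invokes on entity removal:

# Minimal sketch of the listener pattern used above (hypothetical class,
# not part of the gogogate2 integration).
class FakeCoordinator:
    def __init__(self):
        self._listeners = []

    def async_add_listener(self, update_callback):
        self._listeners.append(update_callback)
        # Return an unsubscribe callable, mirroring DataUpdateCoordinator.
        return lambda: self._listeners.remove(update_callback)

    def notify(self):
        for update_callback in self._listeners:
            update_callback()


coordinator = FakeCoordinator()
unsub = coordinator.async_add_listener(lambda: print("entity state refreshed"))
coordinator.notify()  # prints "entity state refreshed"
unsub()  # what async_on_remove calls when the entity is removed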
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/gogogate2/cover.py
"""Allows to configure a switch using RPi GPIO.""" import logging import voluptuous as vol from homeassistant.components import rpi_gpio from homeassistant.components.switch import PLATFORM_SCHEMA from homeassistant.const import DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import ToggleEntity _LOGGER = logging.getLogger(__name__) CONF_PULL_MODE = "pull_mode" CONF_PORTS = "ports" CONF_INVERT_LOGIC = "invert_logic" DEFAULT_INVERT_LOGIC = False _SWITCHES_SCHEMA = vol.Schema({cv.positive_int: cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PORTS): _SWITCHES_SCHEMA, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Raspberry PI GPIO devices.""" invert_logic = config.get(CONF_INVERT_LOGIC) switches = [] ports = config.get(CONF_PORTS) for port, name in ports.items(): switches.append(RPiGPIOSwitch(name, port, invert_logic)) add_entities(switches) class RPiGPIOSwitch(ToggleEntity): """Representation of a Raspberry Pi GPIO.""" def __init__(self, name, port, invert_logic): """Initialize the pin.""" self._name = name or DEVICE_DEFAULT_NAME self._port = port self._invert_logic = invert_logic self._state = False rpi_gpio.setup_output(self._port) rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if device is on.""" return self._state def turn_on(self, **kwargs): """Turn the device on.""" rpi_gpio.write_output(self._port, 0 if self._invert_logic else 1) self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0) self._state = False self.schedule_update_ha_state()
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/rpi_gpio/switch.py
"""Support for the OpenWeatherMap (OWM) service.""" from datetime import timedelta import logging from pyowm import OWM from pyowm.exceptions.api_call_error import APICallError import voluptuous as vol from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, PLATFORM_SCHEMA, WeatherEntity, ) from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME, PRESSURE_HPA, PRESSURE_INHG, STATE_UNKNOWN, TEMP_CELSIUS, ) import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle from homeassistant.util.pressure import convert as convert_pressure _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by OpenWeatherMap" FORECAST_MODE = ["hourly", "daily", "freedaily"] DEFAULT_NAME = "OpenWeatherMap" MIN_TIME_BETWEEN_FORECAST_UPDATES = timedelta(minutes=30) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) CONDITION_CLASSES = { "cloudy": [803, 804], "fog": [701, 741], "hail": [906], "lightning": [210, 211, 212, 221], "lightning-rainy": [200, 201, 202, 230, 231, 232], "partlycloudy": [801, 802], "pouring": [504, 314, 502, 503, 522], "rainy": [300, 301, 302, 310, 311, 312, 313, 500, 501, 520, 521], "snowy": [600, 601, 602, 611, 612, 620, 621, 622], "snowy-rainy": [511, 615, 616], "sunny": [800], "windy": [905, 951, 952, 953, 954, 955, 956, 957], "windy-variant": [958, 959, 960, 961], "exceptional": [711, 721, 731, 751, 761, 762, 771, 900, 901, 962, 903, 904], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, vol.Optional(CONF_MODE, default="hourly"): vol.In(FORECAST_MODE), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the OpenWeatherMap weather platform.""" longitude = config.get(CONF_LONGITUDE, round(hass.config.longitude, 5)) latitude = config.get(CONF_LATITUDE, round(hass.config.latitude, 5)) name = config.get(CONF_NAME) mode = config.get(CONF_MODE) try: owm = OWM(config.get(CONF_API_KEY)) except APICallError: _LOGGER.error("Error while connecting to OpenWeatherMap") return False data = WeatherData(owm, latitude, longitude, mode) add_entities( [OpenWeatherMapWeather(name, data, hass.config.units.temperature_unit, mode)], True, ) class OpenWeatherMapWeather(WeatherEntity): """Implementation of an OpenWeatherMap sensor.""" def __init__(self, name, owm, temperature_unit, mode): """Initialize the sensor.""" self._name = name self._owm = owm self._temperature_unit = temperature_unit self._mode = mode self.data = None self.forecast_data = None @property def name(self): """Return the name of the sensor.""" return self._name @property def condition(self): """Return the current condition.""" try: return [ k for k, v in CONDITION_CLASSES.items() if self.data.get_weather_code() in v ][0] except IndexError: return STATE_UNKNOWN @property def temperature(self): """Return the temperature.""" return self.data.get_temperature("celsius").get("temp") @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def pressure(self): """Return the pressure.""" pressure = self.data.get_pressure().get("press") if self.hass.config.units.name == "imperial": return round(convert_pressure(pressure, PRESSURE_HPA, PRESSURE_INHG), 2) 
return pressure @property def humidity(self): """Return the humidity.""" return self.data.get_humidity() @property def wind_speed(self): """Return the wind speed.""" if self.hass.config.units.name == "imperial": return round(self.data.get_wind().get("speed") * 2.24, 2) return round(self.data.get_wind().get("speed") * 3.6, 2) @property def wind_bearing(self): """Return the wind bearing.""" return self.data.get_wind().get("deg") @property def attribution(self): """Return the attribution.""" return ATTRIBUTION @property def forecast(self): """Return the forecast array.""" data = [] def calc_precipitation(rain, snow): """Calculate the precipitation.""" rain_value = 0 if rain is None else rain snow_value = 0 if snow is None else snow if round(rain_value + snow_value, 1) == 0: return None return round(rain_value + snow_value, 1) if self._mode == "freedaily": weather = self.forecast_data.get_weathers()[::8] else: weather = self.forecast_data.get_weathers() for entry in weather: if self._mode == "daily": data.append( { ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000, ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get("day"), ATTR_FORECAST_TEMP_LOW: entry.get_temperature("celsius").get( "night" ), ATTR_FORECAST_PRECIPITATION: calc_precipitation( entry.get_rain().get("all"), entry.get_snow().get("all") ), ATTR_FORECAST_WIND_SPEED: entry.get_wind().get("speed"), ATTR_FORECAST_WIND_BEARING: entry.get_wind().get("deg"), ATTR_FORECAST_CONDITION: [ k for k, v in CONDITION_CLASSES.items() if entry.get_weather_code() in v ][0], } ) else: data.append( { ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000, ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get( "temp" ), ATTR_FORECAST_PRECIPITATION: ( round(entry.get_rain().get("3h"), 1) if entry.get_rain().get("3h") is not None and (round(entry.get_rain().get("3h"), 1) > 0) else None ), ATTR_FORECAST_CONDITION: [ k for k, v in CONDITION_CLASSES.items() if entry.get_weather_code() in v ][0], } ) return data def update(self): """Get the latest data from OWM and updates the states.""" try: self._owm.update() self._owm.update_forecast() except APICallError: _LOGGER.error("Exception when calling OWM web API to update data") return self.data = self._owm.data self.forecast_data = self._owm.forecast_data class WeatherData: """Get the latest data from OpenWeatherMap.""" def __init__(self, owm, latitude, longitude, mode): """Initialize the data object.""" self._mode = mode self.owm = owm self.latitude = latitude self.longitude = longitude self.data = None self.forecast_data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from OpenWeatherMap.""" obs = self.owm.weather_at_coords(self.latitude, self.longitude) if obs is None: _LOGGER.warning("Failed to fetch data from OWM") return self.data = obs.get_weather() @Throttle(MIN_TIME_BETWEEN_FORECAST_UPDATES) def update_forecast(self): """Get the latest forecast from OpenWeatherMap.""" try: if self._mode == "daily": fcd = self.owm.daily_forecast_at_coords( self.latitude, self.longitude, 15 ) else: fcd = self.owm.three_hours_forecast_at_coords( self.latitude, self.longitude ) except APICallError: _LOGGER.error("Exception when calling OWM web API to update forecast") return if fcd is None: _LOGGER.warning("Failed to fetch forecast data from OWM") return self.forecast_data = fcd.get_forecast()
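The condition property above picks the first CONDITION_CLASSES key whose code list contains the current OWM weather code, falling back to STATE_UNKNOWN on a miss. The same lookup, sketched standalone with a trimmed mapping for illustration:

# Trimmed copy of the mapping, for illustration only.
CONDITION_CLASSES = {
    "sunny": [800],
    "partlycloudy": [801, 802],
    "rainy": [500, 501, 520, 521],
}


def condition_for(code):
    """Return the first condition whose code list contains `code`, else None."""
    return next((k for k, v in CONDITION_CLASSES.items() if code in v), None)


assert condition_for(800) == "sunny"
assert condition_for(999) is None  # unmapped codes fall through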
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/openweathermap/weather.py
"""Syslog notification service.""" import logging import syslog import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) CONF_FACILITY = "facility" CONF_OPTION = "option" CONF_PRIORITY = "priority" SYSLOG_FACILITY = { "kernel": "LOG_KERN", "user": "LOG_USER", "mail": "LOG_MAIL", "daemon": "LOG_DAEMON", "auth": "LOG_KERN", "LPR": "LOG_LPR", "news": "LOG_NEWS", "uucp": "LOG_UUCP", "cron": "LOG_CRON", "syslog": "LOG_SYSLOG", "local0": "LOG_LOCAL0", "local1": "LOG_LOCAL1", "local2": "LOG_LOCAL2", "local3": "LOG_LOCAL3", "local4": "LOG_LOCAL4", "local5": "LOG_LOCAL5", "local6": "LOG_LOCAL6", "local7": "LOG_LOCAL7", } SYSLOG_OPTION = { "pid": "LOG_PID", "cons": "LOG_CONS", "ndelay": "LOG_NDELAY", "nowait": "LOG_NOWAIT", "perror": "LOG_PERROR", } SYSLOG_PRIORITY = { 5: "LOG_EMERG", 4: "LOG_ALERT", 3: "LOG_CRIT", 2: "LOG_ERR", 1: "LOG_WARNING", 0: "LOG_NOTICE", -1: "LOG_INFO", -2: "LOG_DEBUG", } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_FACILITY, default="syslog"): vol.In(SYSLOG_FACILITY.keys()), vol.Optional(CONF_OPTION, default="pid"): vol.In(SYSLOG_OPTION.keys()), vol.Optional(CONF_PRIORITY, default=-1): vol.In(SYSLOG_PRIORITY.keys()), } ) def get_service(hass, config, discovery_info=None): """Get the syslog notification service.""" facility = getattr(syslog, SYSLOG_FACILITY[config.get(CONF_FACILITY)]) option = getattr(syslog, SYSLOG_OPTION[config.get(CONF_OPTION)]) priority = getattr(syslog, SYSLOG_PRIORITY[config.get(CONF_PRIORITY)]) return SyslogNotificationService(facility, option, priority) class SyslogNotificationService(BaseNotificationService): """Implement the syslog notification service.""" def __init__(self, facility, option, priority): """Initialize the service.""" self._facility = facility self._option = option self._priority = priority def send_message(self, message="", **kwargs): """Send a message to a user.""" title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) syslog.openlog(title, self._option, self._facility) syslog.syslog(self._priority, message) syslog.closelog()
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/syslog/notify.py
"""Config flow to configure the RainMachine component.""" from regenmaschine import login from regenmaschine.errors import RainMachineError import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL, CONF_SSL, ) from homeassistant.helpers import aiohttp_client from .const import ( # pylint: disable=unused-import CONF_ZONE_RUN_TIME, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DEFAULT_ZONE_RUN, DOMAIN, ) class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a RainMachine config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.data_schema = vol.Schema( { vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str, vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, } ) async def _show_form(self, errors=None): """Show the form to the user.""" return self.async_show_form( step_id="user", data_schema=self.data_schema, errors=errors if errors else {}, ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return await self._show_form() await self.async_set_unique_id(user_input[CONF_IP_ADDRESS]) self._abort_if_unique_id_configured() websession = aiohttp_client.async_get_clientsession(self.hass) try: await login( user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD], websession, port=user_input[CONF_PORT], ssl=user_input.get(CONF_SSL, True), ) except RainMachineError: return await self._show_form({CONF_PASSWORD: "invalid_credentials"}) # Unfortunately, RainMachine doesn't provide a way to refresh the # access token without using the IP address and password, so we have to # store it: return self.async_create_entry( title=user_input[CONF_IP_ADDRESS], data={ CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS], CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_PORT: user_input[CONF_PORT], CONF_SSL: user_input.get(CONF_SSL, True), CONF_SCAN_INTERVAL: user_input.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds() ), CONF_ZONE_RUN_TIME: user_input.get( CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN ), }, )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/rainmachine/config_flow.py
"""Tracks the latency of a host by sending ICMP echo requests (ping).""" from datetime import timedelta import logging import re import subprocess import sys import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import CONF_HOST, CONF_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" CONF_PING_COUNT = "count" DEFAULT_NAME = "Ping Binary sensor" DEFAULT_PING_COUNT = 5 DEFAULT_DEVICE_CLASS = "connectivity" SCAN_INTERVAL = timedelta(minutes=5) PING_MATCHER = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)" ) PING_MATCHER_BUSYBOX = re.compile( r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)" ) WIN32_PING_MATCHER = re.compile(r"(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms") PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PING_COUNT, default=DEFAULT_PING_COUNT): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Ping Binary sensor.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) count = config.get(CONF_PING_COUNT) add_entities([PingBinarySensor(name, PingData(host, count))], True) class PingBinarySensor(BinarySensorEntity): """Representation of a Ping Binary sensor.""" def __init__(self, name, ping): """Initialize the Ping Binary sensor.""" self._name = name self.ping = ping @property def name(self): """Return the name of the device.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return DEFAULT_DEVICE_CLASS @property def is_on(self): """Return true if the binary sensor is on.""" return self.ping.available @property def device_state_attributes(self): """Return the state attributes of the ICMP checo request.""" if self.ping.data is not False: return { ATTR_ROUND_TRIP_TIME_AVG: self.ping.data["avg"], ATTR_ROUND_TRIP_TIME_MAX: self.ping.data["max"], ATTR_ROUND_TRIP_TIME_MDEV: self.ping.data["mdev"], ATTR_ROUND_TRIP_TIME_MIN: self.ping.data["min"], } def update(self): """Get the latest data.""" self.ping.update() class PingData: """The Class for handling the data retrieval.""" def __init__(self, host, count): """Initialize the data object.""" self._ip_address = host self._count = count self.data = {} self.available = False if sys.platform == "win32": self._ping_cmd = [ "ping", "-n", str(self._count), "-w", "1000", self._ip_address, ] else: self._ping_cmd = [ "ping", "-n", "-q", "-c", str(self._count), "-W1", self._ip_address, ] def ping(self): """Send ICMP echo request and return details if success.""" pinger = subprocess.Popen( self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) try: out = pinger.communicate() _LOGGER.debug("Output is %s", str(out)) if sys.platform == "win32": match = WIN32_PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} if "max/" not in str(out): match = PING_MATCHER_BUSYBOX.search(str(out).split("\n")[-1]) rtt_min, rtt_avg, rtt_max = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""} match = PING_MATCHER.search(str(out).split("\n")[-1]) rtt_min, 
rtt_avg, rtt_max, rtt_mdev = match.groups() return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": rtt_mdev} except (subprocess.CalledProcessError, AttributeError): return False def update(self): """Retrieve the latest details from the host.""" self.data = self.ping() self.available = bool(self.data)
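To see what PING_MATCHER extracts, here is a short standalone demo; the sample summary line is a typical Linux ping rtt line, illustrative rather than captured output:

import re

PING_MATCHER = re.compile(
    r"(?P<min>\d+\.\d+)\/(?P<avg>\d+\.\d+)\/(?P<max>\d+\.\d+)\/(?P<mdev>\d+\.\d+)"
)

# Illustrative final line of `ping -c 5` output on Linux.
sample = "rtt min/avg/max/mdev = 0.043/0.055/0.072/0.012 ms"
match = PING_MATCHER.search(sample)
print(match.groupdict())  # {'min': '0.043', 'avg': '0.055', 'max': '0.072', 'mdev': '0.012'}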
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/ping/binary_sensor.py
"""Support for Volvo heater.""" import logging from homeassistant.helpers.entity import ToggleEntity from . import DATA_KEY, VolvoEntity _LOGGER = logging.getLogger(__name__) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up a Volvo switch.""" if discovery_info is None: return async_add_entities([VolvoSwitch(hass.data[DATA_KEY], *discovery_info)]) class VolvoSwitch(VolvoEntity, ToggleEntity): """Representation of a Volvo switch.""" @property def is_on(self): """Return true if switch is on.""" return self.instrument.state async def async_turn_on(self, **kwargs): """Turn the switch on.""" await self.instrument.turn_on() self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the switch off.""" await self.instrument.turn_off() self.async_write_ha_state()
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/volvooncall/switch.py
"""Support for MySensors lights.""" from homeassistant.components import mysensors from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import callback import homeassistant.util.color as color_util from homeassistant.util.color import rgb_hex_to_rgb_list SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for lights.""" device_class_map = { "S_DIMMER": MySensorsLightDimmer, "S_RGB_LIGHT": MySensorsLightRGB, "S_RGBW_LIGHT": MySensorsLightRGBW, } mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, device_class_map, async_add_entities=async_add_entities, ) class MySensorsLight(mysensors.device.MySensorsEntity, LightEntity): """Representation of a MySensors Light child node.""" def __init__(self, *args): """Initialize a MySensors Light.""" super().__init__(*args) self._state = None self._brightness = None self._hs = None self._white = None @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def hs_color(self): """Return the hs color value [int, int].""" return self._hs @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def assumed_state(self): """Return true if unable to access real state of entity.""" return self.gateway.optimistic @property def is_on(self): """Return true if device is on.""" return self._state def _turn_on_light(self): """Turn on light child device.""" set_req = self.gateway.const.SetReq if self._state: return self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_LIGHT, 1, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = True self._values[set_req.V_LIGHT] = STATE_ON def _turn_on_dimmer(self, **kwargs): """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq brightness = self._brightness if ( ATTR_BRIGHTNESS not in kwargs or kwargs[ATTR_BRIGHTNESS] == self._brightness or set_req.V_DIMMER not in self._values ): return brightness = kwargs[ATTR_BRIGHTNESS] percent = round(100 * brightness / 255) self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, percent, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._brightness = brightness self._values[set_req.V_DIMMER] = percent def _turn_on_rgb_and_w(self, hex_template, **kwargs): """Turn on RGB or RGBW child device.""" rgb = list(color_util.color_hs_to_RGB(*self._hs)) white = self._white hex_color = self._values.get(self.value_type) hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None: new_rgb = color_util.color_hs_to_RGB(*hs_color) else: new_rgb = None new_white = kwargs.get(ATTR_WHITE_VALUE) if new_rgb is None and new_white is None: return if new_rgb is not None: rgb = list(new_rgb) if hex_template == "%02x%02x%02x%02x": if new_white is not None: rgb.append(new_white) else: rgb.append(white) hex_color = hex_template % tuple(rgb) if len(rgb) > 3: white = rgb.pop() self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) if self.gateway.optimistic: # optimistically assume that light has changed state self._hs = color_util.color_RGB_to_hs(*rgb) self._white = white 
self._values[self.value_type] = hex_color async def async_turn_off(self, **kwargs): """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value(self.node_id, self.child_id, value_type, 0, ack=1) if self.gateway.optimistic: # optimistically assume that light has changed state self._state = False self._values[value_type] = STATE_OFF self.async_write_ha_state() @callback def _async_update_light(self): """Update the controller with values from light child.""" value_type = self.gateway.const.SetReq.V_LIGHT self._state = self._values[value_type] == STATE_ON @callback def _async_update_dimmer(self): """Update the controller with values from dimmer child.""" value_type = self.gateway.const.SetReq.V_DIMMER if value_type in self._values: self._brightness = round(255 * int(self._values[value_type]) / 100) if self._brightness == 0: self._state = False @callback def _async_update_rgb_or_w(self): """Update the controller with values from RGB or RGBW child.""" value = self._values[self.value_type] color_list = rgb_hex_to_rgb_list(value) if len(color_list) > 3: self._white = color_list.pop() self._hs = color_util.color_RGB_to_hs(*color_list) class MySensorsLightDimmer(MySensorsLight): """Dimmer child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() class MySensorsLightRGB(MySensorsLight): """RGB child class to MySensorsLight.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_COLOR return SUPPORT_COLOR async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state() async def async_update(self): """Update the controller with the latest value from a sensor.""" await super().async_update() self._async_update_light() self._async_update_dimmer() self._async_update_rgb_or_w() class MySensorsLightRGBW(MySensorsLightRGB): """RGBW child class to MySensorsLightRGB.""" @property def supported_features(self): """Flag supported features.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW return SUPPORT_MYSENSORS_RGBW async def async_turn_on(self, **kwargs): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb_and_w("%02x%02x%02x%02x", **kwargs) if self.gateway.optimistic: self.async_write_ha_state()
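The RGBW handling above packs channel values into a hex string with the "%02x%02x%02x%02x" template and unpacks them again on update; here is a standalone sketch of that round trip, with a stand-in for the homeassistant.util.color helper:

def rgb_hex_to_rgb_list(hex_string):
    """Stand-in for homeassistant.util.color.rgb_hex_to_rgb_list."""
    return [int(hex_string[i : i + 2], 16) for i in range(0, len(hex_string), 2)]


rgbw = [255, 128, 0, 200]
hex_color = "%02x%02x%02x%02x" % tuple(rgbw)
print(hex_color)  # ff8000c8
print(rgb_hex_to_rgb_list(hex_color))  # [255, 128, 0, 200]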
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/mysensors/light.py
"""An abstract class common to all Bond entities.""" from abc import abstractmethod from asyncio import TimeoutError as AsyncIOTimeoutError import logging from typing import Any, Dict, Optional from aiohttp import ClientError from homeassistant.const import ATTR_NAME from homeassistant.helpers.entity import Entity from .const import DOMAIN from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) class BondEntity(Entity): """Generic Bond entity encapsulating common features of any Bond controlled device.""" def __init__(self, hub: BondHub, device: BondDevice): """Initialize entity with API and device info.""" self._hub = hub self._device = device self._available = True @property def unique_id(self) -> Optional[str]: """Get unique ID for the entity.""" return self._device.device_id @property def name(self) -> Optional[str]: """Get entity name.""" return self._device.name @property def device_info(self) -> Optional[Dict[str, Any]]: """Get a an HA device representing this Bond controlled device.""" return { ATTR_NAME: self.name, "identifiers": {(DOMAIN, self._device.device_id)}, "via_device": (DOMAIN, self._hub.bond_id), } @property def assumed_state(self) -> bool: """Let HA know this entity relies on an assumed state tracked by Bond.""" return True @property def available(self) -> bool: """Report availability of this entity based on last API call results.""" return self._available async def async_update(self): """Fetch assumed state of the cover from the hub using API.""" try: state: dict = await self._hub.bond.device_state(self._device.device_id) except (ClientError, AsyncIOTimeoutError, OSError) as error: if self._available: _LOGGER.warning( "Entity %s has become unavailable", self.entity_id, exc_info=error ) self._available = False else: if not self._available: _LOGGER.info("Entity %s has come back", self.entity_id) self._available = True self._apply_state(state) @abstractmethod def _apply_state(self, state: dict): raise NotImplementedError
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test that setup adds the admin group to the Hass.io user.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with an existing Hass.io user.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test that setup pushes the core timezone and follows updates.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with no additional data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!"
in caplog.text async def test_service_register(hassio_env, hass): """Check if services will be set up.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result":
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/bond/entity.py
"""Support for MyChevy.""" from datetime import timedelta import logging import threading import time import mychevy.mychevy as mc import voluptuous as vol from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import config_validation as cv, discovery from homeassistant.util import Throttle DOMAIN = "mychevy" UPDATE_TOPIC = DOMAIN ERROR_TOPIC = f"{DOMAIN}_error" MYCHEVY_SUCCESS = "success" MYCHEVY_ERROR = "error" NOTIFICATION_ID = "mychevy_website_notification" NOTIFICATION_TITLE = "MyChevy website status" _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30) ERROR_SLEEP_TIME = timedelta(minutes=30) CONF_COUNTRY = "country" DEFAULT_COUNTRY = "us" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_COUNTRY, default=DEFAULT_COUNTRY): vol.All( cv.string, vol.In(["us", "ca"]) ), } ) }, extra=vol.ALLOW_EXTRA, ) class EVSensorConfig: """The EV sensor configuration.""" def __init__( self, name, attr, unit_of_measurement=None, icon=None, extra_attrs=None ): """Create new sensor configuration.""" self.name = name self.attr = attr self.extra_attrs = extra_attrs or [] self.unit_of_measurement = unit_of_measurement self.icon = icon class EVBinarySensorConfig: """The EV binary sensor configuration.""" def __init__(self, name, attr, device_class=None): """Create new binary sensor configuration.""" self.name = name self.attr = attr self.device_class = device_class def setup(hass, base_config): """Set up the mychevy component.""" config = base_config.get(DOMAIN) email = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) country = config.get(CONF_COUNTRY) hass.data[DOMAIN] = MyChevyHub( mc.MyChevy(email, password, country), hass, base_config ) hass.data[DOMAIN].start() return True class MyChevyHub(threading.Thread): """MyChevy Hub. Connecting to the mychevy website is done through a selenium webscraping process. That can only run synchronously. In order to prevent blocking of other parts of Home Assistant the architecture launches a polling loop in a thread. When new data is received, sensors are updated, and hass is signaled that there are updates. Sensors are not created until the first update, which will be 60 - 120 seconds after the platform starts. """ def __init__(self, client, hass, hass_config): """Initialize MyChevy Hub.""" super().__init__() self._client = client self.hass = hass self.hass_config = hass_config self.cars = [] self.status = None self.ready = False @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Update sensors from mychevy website. 
This is a synchronous polling call that takes a very long time (on the order of two to three minutes). """ self._client.login() self._client.get_cars() self.cars = self._client.cars if self.ready is not True: discovery.load_platform(self.hass, "sensor", DOMAIN, {}, self.hass_config) discovery.load_platform( self.hass, "binary_sensor", DOMAIN, {}, self.hass_config ) self.ready = True self.cars = self._client.update_cars() def get_car(self, vid): """Compatibility to work with one car.""" if self.cars: for car in self.cars: if car.vid == vid: return car return None def run(self): """Thread run loop.""" # We add the status device first outside of the loop # And then busy wait on threads while True: try: _LOGGER.info("Starting mychevy loop") self.update() self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC) time.sleep(MIN_TIME_BETWEEN_UPDATES.seconds) except Exception: # pylint: disable=broad-except _LOGGER.exception( "Error updating mychevy data. " "This probably means the OnStar link is down again" ) self.hass.helpers.dispatcher.dispatcher_send(ERROR_TOPIC) time.sleep(ERROR_SLEEP_TIME.seconds)
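On the consuming side, entities subscribe to the module's UPDATE_TOPIC/ERROR_TOPIC signals through the dispatcher, so the hub thread never touches entity state directly. A minimal sketch of such a consumer (EVSensorSketch is an illustrative name, not the integration's actual sensor class):

# Minimal sketch of a dispatcher consumer for the hub above; the class name
# is an illustrative assumption, not the integration's real sensor code.
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity


class EVSensorSketch(Entity):
    """Hypothetical sensor re-rendered whenever the hub publishes new data."""

    async def async_added_to_hass(self):
        """Subscribe to the hub's update signal once the entity is registered."""
        async_dispatcher_connect(self.hass, UPDATE_TOPIC, self.async_write_ha_state)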
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test that setup adds the admin group to the Hass.io user.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with an existing Hass.io user.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test that setup pushes the core timezone and follows updates.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with no additional data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!"
in caplog.text async def test_service_register(hassio_env, hass): """Check if services will be set up.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result":
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/mychevy/__init__.py
"""The BleBox devices integration.""" import asyncio import logging from blebox_uniapi.error import Error from blebox_uniapi.products import Products from blebox_uniapi.session import ApiHost from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import Entity from .const import DEFAULT_SETUP_TIMEOUT, DOMAIN, PRODUCT _LOGGER = logging.getLogger(__name__) PLATFORMS = ["cover", "sensor", "switch", "air_quality", "light", "climate"] PARALLEL_UPDATES = 0 async def async_setup(hass: HomeAssistant, config: dict): """Set up the BleBox devices component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up BleBox devices from a config entry.""" websession = async_get_clientsession(hass) host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] timeout = DEFAULT_SETUP_TIMEOUT api_host = ApiHost(host, port, timeout, websession, hass.loop) try: product = await Products.async_from_host(api_host) except Error as ex: _LOGGER.error("Identify failed at %s:%d (%s)", api_host.host, api_host.port, ex) raise ConfigEntryNotReady from ex domain = hass.data.setdefault(DOMAIN, {}) domain_entry = domain.setdefault(entry.entry_id, {}) product = domain_entry.setdefault(PRODUCT, product) for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok @callback def create_blebox_entities( hass, config_entry, async_add_entities, entity_klass, entity_type ): """Create entities from a BleBox product's features.""" product = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [] if entity_type in product.features: for feature in product.features[entity_type]: entities.append(entity_klass(feature)) async_add_entities(entities, True) class BleBoxEntity(Entity): """Implements a common class for entities representing a BleBox feature.""" def __init__(self, feature): """Initialize a BleBox entity.""" self._feature = feature @property def name(self): """Return the internal entity name.""" return self._feature.full_name @property def unique_id(self): """Return a unique id.""" return self._feature.unique_id async def async_update(self): """Update the entity state.""" try: await self._feature.async_update() except Error as ex: _LOGGER.error("Updating '%s' failed: %s", self.name, ex) @property def device_info(self): """Return device information for this entity.""" product = self._feature.product return { "identifiers": {(DOMAIN, product.unique_id)}, "name": product.name, "manufacturer": product.brand, "model": product.model, "sw_version": product.firmware_version, }
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test that setup adds the admin group to the Hass.io user.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with an existing Hass.io user.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test that setup pushes the core timezone and follows updates.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with no additional data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!"
in caplog.text async def test_service_register(hassio_env, hass): """Check if services will be set up.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result":
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/blebox/__init__.py
"""Support for Z-Wave.""" # pylint: disable=import-outside-toplevel import asyncio import copy from importlib import import_module import logging from pprint import pprint import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import CoreState, callback from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import ( async_get_registry as async_get_device_registry, ) from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.entity_registry import ( async_get_registry as async_get_entity_registry, ) from homeassistant.helpers.entity_values import EntityValues from homeassistant.helpers.event import async_track_time_change from homeassistant.util import convert import homeassistant.util.dt as dt_util from . import config_flow # noqa: F401 pylint: disable=unused-import from . import const, websocket_api as wsapi, workaround from .const import ( CONF_AUTOHEAL, CONF_CONFIG_PATH, CONF_DEBUG, CONF_NETWORK_KEY, CONF_POLLING_INTERVAL, CONF_USB_STICK_PATH, DATA_DEVICES, DATA_ENTITY_VALUES, DATA_NETWORK, DATA_ZWAVE_CONFIG, DEFAULT_CONF_AUTOHEAL, DEFAULT_CONF_USB_STICK_PATH, DEFAULT_DEBUG, DEFAULT_POLLING_INTERVAL, DOMAIN, ) from .discovery_schemas import DISCOVERY_SCHEMAS from .node_entity import ZWaveBaseEntity, ZWaveNodeEntity from .util import ( check_has_unique_id, check_node_schema, check_value_schema, is_node_parsed, node_device_id_and_name, node_name, ) _LOGGER = logging.getLogger(__name__) CLASS_ID = "class_id" ATTR_POWER = "power_consumption" CONF_POLLING_INTENSITY = "polling_intensity" CONF_IGNORED = "ignored" CONF_INVERT_OPENCLOSE_BUTTONS = "invert_openclose_buttons" CONF_INVERT_PERCENT = "invert_percent" CONF_REFRESH_VALUE = "refresh_value" CONF_REFRESH_DELAY = "delay" CONF_DEVICE_CONFIG = "device_config" CONF_DEVICE_CONFIG_GLOB = "device_config_glob" CONF_DEVICE_CONFIG_DOMAIN = "device_config_domain" DEFAULT_CONF_IGNORED = False DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS = False DEFAULT_CONF_INVERT_PERCENT = False DEFAULT_CONF_REFRESH_VALUE = False DEFAULT_CONF_REFRESH_DELAY = 5 SUPPORTED_PLATFORMS = [ "binary_sensor", "climate", "cover", "fan", "lock", "light", "sensor", "switch", ] RENAME_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) RENAME_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_NAME): cv.string, vol.Optional(const.ATTR_UPDATE_IDS, default=False): cv.boolean, } ) SET_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(vol.Coerce(int), cv.string), vol.Optional(const.ATTR_CONFIG_SIZE, default=2): vol.Coerce(int), } ) SET_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Any(vol.Coerce(int), cv.string), vol.Required(const.ATTR_CONFIG_VALUE): 
vol.Any(vol.Coerce(int), cv.string), } ) REFRESH_NODE_VALUE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), } ) SET_POLL_INTENSITY_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_VALUE_ID): vol.Coerce(int), vol.Required(const.ATTR_POLL_INTENSITY): vol.Coerce(int), } ) PRINT_CONFIG_PARAMETER_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), } ) NODE_SERVICE_SCHEMA = vol.Schema({vol.Required(const.ATTR_NODE_ID): vol.Coerce(int)}) REFRESH_ENTITY_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) RESET_NODE_METERS_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=1): vol.Coerce(int), } ) CHANGE_ASSOCIATION_SCHEMA = vol.Schema( { vol.Required(const.ATTR_ASSOCIATION): cv.string, vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_TARGET_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_GROUP): vol.Coerce(int), vol.Optional(const.ATTR_INSTANCE, default=0x00): vol.Coerce(int), } ) SET_WAKEUP_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.All( vol.Coerce(int), cv.positive_int ), } ) HEAL_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_RETURN_ROUTES, default=False): cv.boolean, } ) TEST_NODE_SCHEMA = vol.Schema( { vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Optional(const.ATTR_MESSAGES, default=1): cv.positive_int, } ) DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema( { vol.Optional(CONF_POLLING_INTENSITY): cv.positive_int, vol.Optional(CONF_IGNORED, default=DEFAULT_CONF_IGNORED): cv.boolean, vol.Optional( CONF_INVERT_OPENCLOSE_BUTTONS, default=DEFAULT_CONF_INVERT_OPENCLOSE_BUTTONS ): cv.boolean, vol.Optional( CONF_INVERT_PERCENT, default=DEFAULT_CONF_INVERT_PERCENT ): cv.boolean, vol.Optional( CONF_REFRESH_VALUE, default=DEFAULT_CONF_REFRESH_VALUE ): cv.boolean, vol.Optional( CONF_REFRESH_DELAY, default=DEFAULT_CONF_REFRESH_DELAY ): cv.positive_int, } ) SIGNAL_REFRESH_ENTITY_FORMAT = "zwave_refresh_entity_{}" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_AUTOHEAL, default=DEFAULT_CONF_AUTOHEAL): cv.boolean, vol.Optional(CONF_CONFIG_PATH): cv.string, vol.Optional(CONF_NETWORK_KEY): vol.All( cv.string, vol.Match(r"(0x\w\w,\s?){15}0x\w\w") ), vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema( {cv.entity_id: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_GLOB, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEVICE_CONFIG_DOMAIN, default={}): vol.Schema( {cv.string: DEVICE_CONFIG_SCHEMA_ENTRY} ), vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean, vol.Optional( CONF_POLLING_INTERVAL, default=DEFAULT_POLLING_INTERVAL ): cv.positive_int, vol.Optional(CONF_USB_STICK_PATH): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) def _obj_to_dict(obj): """Convert an object into a hash for debug.""" return { key: getattr(obj, key) for key in dir(obj) if key[0] != "_" and not callable(getattr(obj, key)) } def _value_name(value): """Return the name of the value.""" return f"{node_name(value.node)} {value.label}".strip() def nice_print_node(node): """Print a nice formatted node to the output (debug method).""" node_dict = _obj_to_dict(node) node_dict["values"] = { value_id: _obj_to_dict(value) for 
value_id, value in node.values.items() } _LOGGER.info("FOUND NODE %s \n%s", node.product_name, node_dict) def get_config_value(node, value_index, tries=5): """Return the current configuration value for a specific index.""" try: for value in node.values.values(): if ( value.command_class == const.COMMAND_CLASS_CONFIGURATION and value.index == value_index ): return value.data except RuntimeError: # If we get a runtime error the dict has changed while # we were looking for a value, just do it again return ( None if tries <= 0 else get_config_value(node, value_index, tries=tries - 1) ) return None async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Z-Wave platform (generic part).""" if discovery_info is None or DATA_NETWORK not in hass.data: return False device = hass.data[DATA_DEVICES].get(discovery_info[const.DISCOVERY_DEVICE]) if device is None: return False async_add_entities([device]) return True async def async_setup(hass, config): """Set up Z-Wave components.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_ZWAVE_CONFIG] = conf if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={ CONF_USB_STICK_PATH: conf.get( CONF_USB_STICK_PATH, DEFAULT_CONF_USB_STICK_PATH ), CONF_NETWORK_KEY: conf.get(CONF_NETWORK_KEY), }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up Z-Wave from a config entry. Will automatically load components to support devices found on the network. """ from pydispatch import dispatcher # pylint: disable=import-error from openzwave.option import ZWaveOption from openzwave.network import ZWaveNetwork from openzwave.group import ZWaveGroup # Merge config entry and yaml config config = config_entry.data if DATA_ZWAVE_CONFIG in hass.data: config = {**config, **hass.data[DATA_ZWAVE_CONFIG]} # Update hass.data with merged config so we can access it elsewhere hass.data[DATA_ZWAVE_CONFIG] = config # Load configuration use_debug = config.get(CONF_DEBUG, DEFAULT_DEBUG) autoheal = config.get(CONF_AUTOHEAL, DEFAULT_CONF_AUTOHEAL) device_config = EntityValues( config.get(CONF_DEVICE_CONFIG), config.get(CONF_DEVICE_CONFIG_DOMAIN), config.get(CONF_DEVICE_CONFIG_GLOB), ) usb_path = config[CONF_USB_STICK_PATH] _LOGGER.info("Z-Wave USB path is %s", usb_path) # Setup options options = ZWaveOption( usb_path, user_path=hass.config.config_dir, config_path=config.get(CONF_CONFIG_PATH), ) options.set_console_output(use_debug) if config.get(CONF_NETWORK_KEY): options.addOption("NetworkKey", config[CONF_NETWORK_KEY]) await hass.async_add_executor_job(options.lock) network = hass.data[DATA_NETWORK] = ZWaveNetwork(options, autostart=False) hass.data[DATA_DEVICES] = {} hass.data[DATA_ENTITY_VALUES] = [] registry = await async_get_entity_registry(hass) wsapi.async_load_websocket_api(hass) if use_debug: # pragma: no cover def log_all(signal, value=None): """Log all the signals.""" print("") print("SIGNAL *****", signal) if value and signal in ( ZWaveNetwork.SIGNAL_VALUE_CHANGED, ZWaveNetwork.SIGNAL_VALUE_ADDED, ZWaveNetwork.SIGNAL_SCENE_EVENT, ZWaveNetwork.SIGNAL_NODE_EVENT, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, ): pprint(_obj_to_dict(value)) print("") dispatcher.connect(log_all, weak=False) def value_added(node, value): """Handle a new value added to a node on the network.""" # Check if this value
should be tracked by an existing entity for values in hass.data[DATA_ENTITY_VALUES]: values.check_value(value) for schema in DISCOVERY_SCHEMAS: if not check_node_schema(node, schema): continue if not check_value_schema( value, schema[const.DISC_VALUES][const.DISC_PRIMARY] ): continue values = ZWaveDeviceEntityValues( hass, schema, value, config, device_config, registry ) # We create a new list and update the reference here so that # the list can be safely iterated over in the main thread new_values = hass.data[DATA_ENTITY_VALUES] + [values] hass.data[DATA_ENTITY_VALUES] = new_values platform = EntityPlatform( hass=hass, logger=_LOGGER, domain=DOMAIN, platform_name=DOMAIN, platform=None, scan_interval=DEFAULT_SCAN_INTERVAL, entity_namespace=None, ) platform.config_entry = config_entry def node_added(node): """Handle a new node on the network.""" entity = ZWaveNodeEntity(node, network) async def _add_node_to_component(): if hass.data[DATA_DEVICES].get(entity.unique_id): return name = node_name(node) generated_id = generate_entity_id(DOMAIN + ".{}", name, []) node_config = device_config.get(generated_id) if node_config.get(CONF_IGNORED): _LOGGER.info( "Ignoring node entity %s due to device settings", generated_id ) return hass.data[DATA_DEVICES][entity.unique_id] = entity await platform.async_add_entities([entity]) if entity.unique_id: hass.async_add_job(_add_node_to_component()) return @callback def _on_ready(sec): _LOGGER.info("Z-Wave node %d ready after %d seconds", entity.node_id, sec) hass.async_add_job(_add_node_to_component) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave node %d not ready after %d seconds, continuing anyway", entity.node_id, sec, ) hass.async_add_job(_add_node_to_component) hass.add_job(check_has_unique_id, entity, _on_ready, _on_timeout) def node_removed(node): node_id = node.node_id node_key = f"node-{node_id}" for key in list(hass.data[DATA_DEVICES]): if key is None: continue if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] _LOGGER.debug( "Removing Entity - value: %s - entity_id: %s", key, entity.entity_id ) hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][key] entity = hass.data[DATA_DEVICES][node_key] hass.add_job(entity.node_removed()) del hass.data[DATA_DEVICES][node_key] hass.add_job(_remove_device(node)) async def _remove_device(node): dev_reg = await async_get_device_registry(hass) identifier, name = node_device_id_and_name(node) device = dev_reg.async_get_device(identifiers={identifier}, connections=set()) if device is not None: _LOGGER.debug("Removing Device - %s - %s", device.id, name) dev_reg.async_remove_device(device.id) def network_ready(): """Handle the query of all awake nodes.""" _LOGGER.info( "Z-Wave network is ready for use. All awake nodes " "have been queried. Sleeping nodes will be " "queried when they awake" ) hass.bus.fire(const.EVENT_NETWORK_READY) def network_complete(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. All nodes on the network have been queried" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE) def network_complete_some_dead(): """Handle the querying of all nodes on network.""" _LOGGER.info( "Z-Wave network is complete. 
All nodes on the network " "have been queried, but some nodes are marked dead" ) hass.bus.fire(const.EVENT_NETWORK_COMPLETE_SOME_DEAD) dispatcher.connect(value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED, weak=False) dispatcher.connect(node_added, ZWaveNetwork.SIGNAL_NODE_ADDED, weak=False) dispatcher.connect(node_removed, ZWaveNetwork.SIGNAL_NODE_REMOVED, weak=False) dispatcher.connect( network_ready, ZWaveNetwork.SIGNAL_AWAKE_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED, weak=False ) dispatcher.connect( network_complete_some_dead, ZWaveNetwork.SIGNAL_ALL_NODES_QUERIED_SOME_DEAD, weak=False, ) def add_node(service): """Switch into inclusion mode.""" _LOGGER.info("Z-Wave add_node has been initialized") network.controller.add_node() def add_node_secure(service): """Switch into secure inclusion mode.""" _LOGGER.info("Z-Wave add_node_secure has been initialized") network.controller.add_node(True) def remove_node(service): """Switch into exclusion mode.""" _LOGGER.info("Z-Wave remove_node has been initialized") network.controller.remove_node() def cancel_command(service): """Cancel a running controller command.""" _LOGGER.info("Cancel running Z-Wave command") network.controller.cancel_command() def heal_network(service): """Heal the network.""" _LOGGER.info("Z-Wave heal running") network.heal() def soft_reset(service): """Soft reset the controller.""" _LOGGER.info("Z-Wave soft_reset has been initialized") network.controller.soft_reset() def test_network(service): """Test the network by sending commands to all the nodes.""" _LOGGER.info("Z-Wave test_network has been initialized") network.test() def stop_network(_service_or_event): """Stop Z-Wave network.""" _LOGGER.info("Stopping Z-Wave network") network.stop() if hass.state == CoreState.running: hass.bus.fire(const.EVENT_NETWORK_STOP) async def rename_node(service): """Rename a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] name = service.data.get(const.ATTR_NAME) node.name = name _LOGGER.info("Renamed Z-Wave node %d to %s", node_id, name) update_ids = service.data.get(const.ATTR_UPDATE_IDS) # We want to rename the device, the node entity, # and all the contained entities node_key = f"node-{node_id}" entity = hass.data[DATA_DEVICES][node_key] await entity.node_renamed(update_ids) for key in list(hass.data[DATA_DEVICES]): if not key.startswith(f"{node_id}-"): continue entity = hass.data[DATA_DEVICES][key] await entity.value_renamed(update_ids) async def rename_value(service): """Rename a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] name = service.data.get(const.ATTR_NAME) value.label = name _LOGGER.info( "Renamed Z-Wave value (Node %d Value %d) to %s", node_id, value_id, name ) update_ids = service.data.get(const.ATTR_UPDATE_IDS) value_key = f"{node_id}-{value_id}" entity = hass.data[DATA_DEVICES][value_key] await entity.value_renamed(update_ids) def set_poll_intensity(service): """Set the polling intensity of a node value.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] value = node.values[value_id] intensity = service.data.get(const.ATTR_POLL_INTENSITY) if intensity == 0: if value.disable_poll(): _LOGGER.info("Polling disabled (Node %d Value %d)", node_id, value_id) return _LOGGER.info( "Disabling polling failed (Node %d Value %d)", node_id,
value_id ) else: if value.enable_poll(intensity): _LOGGER.info( "Set polling intensity (Node %d Value %d) to %s", node_id, value_id, intensity, ) return _LOGGER.info( "Set polling intensity failed (Node %d Value %d)", node_id, value_id ) def remove_failed_node(service): """Remove failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to remove zwave node %d", node_id) network.controller.remove_failed_node(node_id) def replace_failed_node(service): """Replace failed node.""" node_id = service.data.get(const.ATTR_NODE_ID) _LOGGER.info("Trying to replace zwave node %d", node_id) network.controller.replace_failed_node(node_id) def set_config_parameter(service): """Set a config parameter to a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) selection = service.data.get(const.ATTR_CONFIG_VALUE) size = service.data.get(const.ATTR_CONFIG_SIZE) for value in node.get_values( class_id=const.COMMAND_CLASS_CONFIGURATION ).values(): if value.index != param: continue if value.type == const.TYPE_BOOL: value.data = int(selection == "True") _LOGGER.info( "Setting configuration parameter %s on Node %s with bool selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_LIST: value.data = str(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with list selection %s", param, node_id, str(selection), ) return if value.type == const.TYPE_BUTTON: network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Setting configuration parameter %s on Node %s " "with button selection %s", param, node_id, selection, ) return value.data = int(selection) _LOGGER.info( "Setting configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) return node.set_config_param(param, selection, size) _LOGGER.info( "Setting unknown configuration parameter %s on Node %s with selection %s", param, node_id, selection, ) def refresh_node_value(service): """Refresh the specified value from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) node = network.nodes[node_id] node.values[value_id].refresh() _LOGGER.info("Node %s value %s refreshed", node_id, value_id) def set_node_value(service): """Set the specified value on a node.""" node_id = service.data.get(const.ATTR_NODE_ID) value_id = service.data.get(const.ATTR_VALUE_ID) value = service.data.get(const.ATTR_CONFIG_VALUE) node = network.nodes[node_id] node.values[value_id].data = value _LOGGER.info("Node %s value %s set to %s", node_id, value_id, value) def print_config_parameter(service): """Print a config parameter from a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) _LOGGER.info( "Config parameter %s on Node %s: %s", param, node_id, get_config_value(node, param), ) def print_node(service): """Print all information about z-wave node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] nice_print_node(node) def set_wakeup(service): """Set wake-up interval of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] value = service.data.get(const.ATTR_CONFIG_VALUE) if node.can_wake_up(): for value_id in node.get_values(class_id=const.COMMAND_CLASS_WAKE_UP): node.values[value_id].data = value _LOGGER.info("Node %s wake-up set to %d", node_id, value) else: 
_LOGGER.info("Node %s is not wakeable", node_id) def change_association(service): """Change an association in the zwave network.""" association_type = service.data.get(const.ATTR_ASSOCIATION) node_id = service.data.get(const.ATTR_NODE_ID) target_node_id = service.data.get(const.ATTR_TARGET_NODE_ID) group = service.data.get(const.ATTR_GROUP) instance = service.data.get(const.ATTR_INSTANCE) node = ZWaveGroup(group, network, node_id) if association_type == "add": node.add_association(target_node_id, instance) _LOGGER.info( "Adding association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) if association_type == "remove": node.remove_association(target_node_id, instance) _LOGGER.info( "Removing association for node:%s in group:%s " "target node:%s, instance=%s", node_id, group, target_node_id, instance, ) async def async_refresh_entity(service): """Refresh values that specific entity depends on.""" entity_id = service.data.get(ATTR_ENTITY_ID) async_dispatcher_send(hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(entity_id)) def refresh_node(service): """Refresh all node info.""" node_id = service.data.get(const.ATTR_NODE_ID) node = network.nodes[node_id] node.refresh_info() def reset_node_meters(service): """Reset meter counters of a node.""" node_id = service.data.get(const.ATTR_NODE_ID) instance = service.data.get(const.ATTR_INSTANCE) node = network.nodes[node_id] for value in node.get_values(class_id=const.COMMAND_CLASS_METER).values(): if value.index != const.INDEX_METER_RESET: continue if value.instance != instance: continue network.manager.pressButton(value.value_id) network.manager.releaseButton(value.value_id) _LOGGER.info( "Resetting meters on node %s instance %s....", node_id, instance ) return _LOGGER.info( "Node %s on instance %s does not have resettable meters", node_id, instance ) def heal_node(service): """Heal a node on the network.""" node_id = service.data.get(const.ATTR_NODE_ID) update_return_routes = service.data.get(const.ATTR_RETURN_ROUTES) node = network.nodes[node_id] _LOGGER.info("Z-Wave node heal running for node %s", node_id) node.heal(update_return_routes) def test_node(service): """Send test messages to a node on the network.""" node_id = service.data.get(const.ATTR_NODE_ID) messages = service.data.get(const.ATTR_MESSAGES) node = network.nodes[node_id] _LOGGER.info("Sending %s test-messages to node %s", messages, node_id) node.test(messages) def start_zwave(_service_or_event): """Startup Z-Wave network.""" _LOGGER.info("Starting Z-Wave network...") network.start() hass.bus.fire(const.EVENT_NETWORK_START) async def _check_awaked(): """Wait for Z-wave awaked state (or timeout) and finalize start.""" _LOGGER.debug("network state: %d %s", network.state, network.state_str) start_time = dt_util.utcnow() while True: waited = int((dt_util.utcnow() - start_time).total_seconds()) if network.state >= network.STATE_AWAKED: # Need to be in STATE_AWAKED before talking to nodes. _LOGGER.info("Z-Wave ready after %d seconds", waited) break if waited >= const.NETWORK_READY_WAIT_SECS: # Wait up to NETWORK_READY_WAIT_SECS seconds for the Z-Wave # network to be ready. 
_LOGGER.warning( "Z-Wave not ready after %d seconds, continuing anyway", waited ) _LOGGER.info( "final network state: %d %s", network.state, network.state_str ) break await asyncio.sleep(1) hass.async_add_job(_finalize_start) hass.add_job(_check_awaked) def _finalize_start(): """Perform final initializations after Z-Wave network is awaked.""" polling_interval = convert(config.get(CONF_POLLING_INTERVAL), int) if polling_interval is not None: network.set_poll_interval(polling_interval, False) poll_interval = network.get_poll_interval() _LOGGER.info("Z-Wave polling interval set to %d ms", poll_interval) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_network) # Register node services for Z-Wave network hass.services.register(DOMAIN, const.SERVICE_ADD_NODE, add_node) hass.services.register(DOMAIN, const.SERVICE_ADD_NODE_SECURE, add_node_secure) hass.services.register(DOMAIN, const.SERVICE_REMOVE_NODE, remove_node) hass.services.register(DOMAIN, const.SERVICE_CANCEL_COMMAND, cancel_command) hass.services.register(DOMAIN, const.SERVICE_HEAL_NETWORK, heal_network) hass.services.register(DOMAIN, const.SERVICE_SOFT_RESET, soft_reset) hass.services.register(DOMAIN, const.SERVICE_TEST_NETWORK, test_network) hass.services.register(DOMAIN, const.SERVICE_STOP_NETWORK, stop_network) hass.services.register( DOMAIN, const.SERVICE_RENAME_NODE, rename_node, schema=RENAME_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RENAME_VALUE, rename_value, schema=RENAME_VALUE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_SET_CONFIG_PARAMETER, set_config_parameter, schema=SET_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_NODE_VALUE, set_node_value, schema=SET_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE_VALUE, refresh_node_value, schema=REFRESH_NODE_VALUE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_PRINT_CONFIG_PARAMETER, print_config_parameter, schema=PRINT_CONFIG_PARAMETER_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REMOVE_FAILED_NODE, remove_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REPLACE_FAILED_NODE, replace_failed_node, schema=NODE_SERVICE_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_CHANGE_ASSOCIATION, change_association, schema=CHANGE_ASSOCIATION_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_WAKEUP, set_wakeup, schema=SET_WAKEUP_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_PRINT_NODE, print_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_ENTITY, async_refresh_entity, schema=REFRESH_ENTITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_REFRESH_NODE, refresh_node, schema=NODE_SERVICE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_RESET_NODE_METERS, reset_node_meters, schema=RESET_NODE_METERS_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_SET_POLL_INTENSITY, set_poll_intensity, schema=SET_POLL_INTENSITY_SCHEMA, ) hass.services.register( DOMAIN, const.SERVICE_HEAL_NODE, heal_node, schema=HEAL_NODE_SCHEMA ) hass.services.register( DOMAIN, const.SERVICE_TEST_NODE, test_node, schema=TEST_NODE_SCHEMA ) # Setup autoheal if autoheal: _LOGGER.info("Z-Wave network autoheal is enabled") async_track_time_change(hass, heal_network, hour=0, minute=0, second=0) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_zwave) hass.services.async_register(DOMAIN, const.SERVICE_START_NETWORK, start_zwave) for entry_component in SUPPORTED_PLATFORMS: 
hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, entry_component) ) return True class ZWaveDeviceEntityValues: """Manages entity access to the underlying zwave value objects.""" def __init__( self, hass, schema, primary_value, zwave_config, device_config, registry ): """Initialize the values object with the passed entity schema.""" self._hass = hass self._zwave_config = zwave_config self._device_config = device_config self._schema = copy.deepcopy(schema) self._values = {} self._entity = None self._workaround_ignore = False self._registry = registry for name in self._schema[const.DISC_VALUES].keys(): self._values[name] = None self._schema[const.DISC_VALUES][name][const.DISC_INSTANCE] = [ primary_value.instance ] self._values[const.DISC_PRIMARY] = primary_value self._node = primary_value.node self._schema[const.DISC_NODE_ID] = [self._node.node_id] # Check values that have already been discovered for node for value in self._node.values.values(): self.check_value(value) self._check_entity_ready() def __getattr__(self, name): """Get the specified value for this entity.""" return self._values[name] def __iter__(self): """Allow iteration over all values.""" return iter(self._values.values()) def check_value(self, value): """Check if the new value matches a missing value for this entity. If a match is found, it is added to the values mapping. """ if not check_node_schema(value.node, self._schema): return for name in self._values: if self._values[name] is not None: continue if not check_value_schema(value, self._schema[const.DISC_VALUES][name]): continue self._values[name] = value if self._entity: self._entity.value_added() self._entity.value_changed() self._check_entity_ready() def _check_entity_ready(self): """Check if all required values are discovered and create entity.""" if self._workaround_ignore: return if self._entity is not None: return for name in self._schema[const.DISC_VALUES]: if self._values[name] is None and not self._schema[const.DISC_VALUES][ name ].get(const.DISC_OPTIONAL): return component = self._schema[const.DISC_COMPONENT] workaround_component = workaround.get_device_component_mapping(self.primary) if workaround_component and workaround_component != component: if workaround_component == workaround.WORKAROUND_IGNORE: _LOGGER.info( "Ignoring Node %d Value %d due to workaround", self.primary.node.node_id, self.primary.value_id, ) # No entity will be created for this value self._workaround_ignore = True return _LOGGER.debug("Using %s instead of %s", workaround_component, component) component = workaround_component entity_id = self._registry.async_get_entity_id( component, DOMAIN, compute_value_unique_id(self._node, self.primary) ) if entity_id is None: value_name = _value_name(self.primary) entity_id = generate_entity_id(component + ".{}", value_name, []) node_config = self._device_config.get(entity_id) # Configure node _LOGGER.debug( "Adding Node_id=%s Generic_command_class=%s, " "Specific_command_class=%s, " "Command_class=%s, Value type=%s, " "Genre=%s as %s", self._node.node_id, self._node.generic, self._node.specific, self.primary.command_class, self.primary.type, self.primary.genre, component, ) if node_config.get(CONF_IGNORED): _LOGGER.info("Ignoring entity %s due to device settings", entity_id) # No entity will be created for this value self._workaround_ignore = True return polling_intensity = convert(node_config.get(CONF_POLLING_INTENSITY), int) if polling_intensity: self.primary.enable_poll(polling_intensity) platform = 
import_module(f".{component}", __name__) device = platform.get_device( node=self._node, values=self, node_config=node_config, hass=self._hass ) if device is None: # No entity will be created for this value self._workaround_ignore = True return self._entity = device @callback def _on_ready(sec): _LOGGER.info( "Z-Wave entity %s (node_id: %d) ready after %d seconds", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) @callback def _on_timeout(sec): _LOGGER.warning( "Z-Wave entity %s (node_id: %d) not ready after %d seconds, " "continuing anyway", device.name, self._node.node_id, sec, ) self._hass.async_add_job(discover_device, component, device) async def discover_device(component, device): """Put device in a dictionary and call discovery on it.""" if self._hass.data[DATA_DEVICES].get(device.unique_id): return self._hass.data[DATA_DEVICES][device.unique_id] = device if component in SUPPORTED_PLATFORMS: async_dispatcher_send(self._hass, f"zwave_new_{component}", device) else: await discovery.async_load_platform( self._hass, component, DOMAIN, {const.DISCOVERY_DEVICE: device.unique_id}, self._zwave_config, ) if device.unique_id: self._hass.add_job(discover_device, component, device) else: self._hass.add_job(check_has_unique_id, device, _on_ready, _on_timeout) class ZWaveDeviceEntity(ZWaveBaseEntity): """Representation of a Z-Wave node entity.""" def __init__(self, values, domain): """Initialize the z-Wave device.""" # pylint: disable=import-error super().__init__() from openzwave.network import ZWaveNetwork from pydispatch import dispatcher self.values = values self.node = values.primary.node self.values.primary.set_change_verified(False) self._name = _value_name(self.values.primary) self._unique_id = self._compute_unique_id() self._update_attributes() dispatcher.connect( self.network_value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED ) def network_value_changed(self, value): """Handle a value change on the network.""" if value.value_id in [v.value_id for v in self.values if v]: return self.value_changed() def value_added(self): """Handle a new value of this entity.""" def value_changed(self): """Handle a changed value for this entity's node.""" self._update_attributes() self.update_properties() self.maybe_schedule_update() async def value_renamed(self, update_ids=False): """Rename the node and update any IDs.""" self._name = _value_name(self.values.primary) if update_ids: # Update entity ID. ent_reg = await async_get_entity_registry(self.hass) new_entity_id = ent_reg.async_generate_entity_id( self.platform.domain, self._name, self.platform.entities.keys() - {self.entity_id}, ) if new_entity_id != self.entity_id: # Don't change the name attribute, it will be None unless # customised and if it's been customised, keep the # customisation. ent_reg.async_update_entity(self.entity_id, new_entity_id=new_entity_id) return # else for the above two ifs, update if not using update_entity self.async_write_ha_state() async def async_added_to_hass(self): """Add device to dict.""" async_dispatcher_connect( self.hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(self.entity_id), self.refresh_from_network, ) def _update_attributes(self): """Update the node attributes. 
May only be used inside callback.""" self.node_id = self.node.node_id self._name = _value_name(self.values.primary) if not self._unique_id: self._unique_id = self._compute_unique_id() if self._unique_id: self.try_remove_and_add() if self.values.power: self.power_consumption = round( self.values.power.data, self.values.power.precision ) else: self.power_consumption = None def update_properties(self): """Update on data changes for node values.""" @property def should_poll(self): """No polling needed.""" return False @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def device_info(self): """Return device information.""" identifier, name = node_device_id_and_name( self.node, self.values.primary.instance ) info = { "name": name, "identifiers": {identifier}, "manufacturer": self.node.manufacturer_name, "model": self.node.product_name, } if self.values.primary.instance > 1: info["via_device"] = (DOMAIN, self.node_id) elif self.node_id > 1: info["via_device"] = (DOMAIN, 1) return info @property def name(self): """Return the name of the device.""" return self._name @property def device_state_attributes(self): """Return the device specific state attributes.""" attrs = { const.ATTR_NODE_ID: self.node_id, const.ATTR_VALUE_INDEX: self.values.primary.index, const.ATTR_VALUE_INSTANCE: self.values.primary.instance, const.ATTR_VALUE_ID: str(self.values.primary.value_id), } if self.power_consumption is not None: attrs[ATTR_POWER] = self.power_consumption return attrs def refresh_from_network(self): """Refresh all dependent values from zwave network.""" for value in self.values: if value is not None: self.node.refresh_value(value.value_id) def _compute_unique_id(self): if ( is_node_parsed(self.node) and self.values.primary.label != "Unknown" ) or self.node.is_ready: return compute_value_unique_id(self.node, self.values.primary) return None def compute_value_unique_id(node, value): """Compute unique_id a value would get if it were to get one.""" return f"{node.node_id}-{value.object_id}"
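
# A minimal usage sketch of compute_value_unique_id defined above, assuming it
# is in scope. The SimpleNamespace stubs are hypothetical stand-ins for real
# openzwave node/value objects, which expose node_id and object_id attributes.
from types import SimpleNamespace

_demo_node = SimpleNamespace(node_id=5)
_demo_value = SimpleNamespace(object_id=1234)
# Unique IDs are composed as "<node_id>-<object_id>", the same key format used
# for hass.data[DATA_DEVICES] lookups throughout the code above.
assert compute_value_unique_id(_demo_node, _demo_value) == "5-1234"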
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/zwave/__init__.py
"""The template component.""" from itertools import chain import logging from homeassistant.const import MATCH_ALL _LOGGER = logging.getLogger(__name__) def initialise_templates(hass, templates, attribute_templates=None): """Initialise templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} for template in chain(templates.values(), attribute_templates.values()): if template is None: continue template.hass = hass def extract_entities( device_name, device_type, manual_entity_ids, templates, attribute_templates=None ): """Extract entity ids from templates and attribute templates.""" if attribute_templates is None: attribute_templates = {} entity_ids = set() if manual_entity_ids is None: invalid_templates = [] for template_name, template in chain( templates.items(), attribute_templates.items() ): if template is None: continue template_entity_ids = template.extract_entities() if template_entity_ids != MATCH_ALL: entity_ids |= set(template_entity_ids) else: invalid_templates.append(template_name.replace("_template", "")) entity_ids = list(entity_ids) if invalid_templates: if not entity_ids: entity_ids = MATCH_ALL _LOGGER.warning( "Template %s '%s' has no entity ids configured to track nor" " were we able to extract the entities to track from the %s " "template(s). This entity will only be able to be updated " "manually", device_type, device_name, ", ".join(invalid_templates), ) else: entity_ids = manual_entity_ids return entity_ids
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/template/__init__.py
"""Support for Niko Home Control.""" from datetime import timedelta import logging import nikohomecontrol import voluptuous as vol # Import the device class from the component that you want to support from homeassistant.components.light import ATTR_BRIGHTNESS, PLATFORM_SCHEMA, LightEntity from homeassistant.const import CONF_HOST from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) SCAN_INTERVAL = timedelta(seconds=30) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Niko Home Control light platform.""" host = config[CONF_HOST] try: nhc = nikohomecontrol.NikoHomeControl( {"ip": host, "port": 8000, "timeout": 20000} ) niko_data = NikoHomeControlData(hass, nhc) await niko_data.async_update() except OSError as err: _LOGGER.error("Unable to access %s (%s)", host, err) raise PlatformNotReady async_add_entities( [NikoHomeControlLight(light, niko_data) for light in nhc.list_actions()], True ) class NikoHomeControlLight(LightEntity): """Representation of an Niko Light.""" def __init__(self, light, data): """Set up the Niko Home Control light platform.""" self._data = data self._light = light self._unique_id = f"light-{light.id}" self._name = light.name self._state = light.is_on self._brightness = None @property def unique_id(self): """Return unique ID for light.""" return self._unique_id @property def name(self): """Return the display name of this light.""" return self._name @property def brightness(self): """Return the brightness of the light.""" return self._brightness @property def is_on(self): """Return true if light is on.""" return self._state def turn_on(self, **kwargs): """Instruct the light to turn on.""" self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255) _LOGGER.debug("Turn on: %s", self.name) self._light.turn_on() def turn_off(self, **kwargs): """Instruct the light to turn off.""" _LOGGER.debug("Turn off: %s", self.name) self._light.turn_off() async def async_update(self): """Get the latest data from NikoHomeControl API.""" await self._data.async_update() self._state = self._data.get_state(self._light.id) class NikoHomeControlData: """The class for handling data retrieval.""" def __init__(self, hass, nhc): """Set up Niko Home Control Data object.""" self._nhc = nhc self.hass = hass self.available = True self.data = {} self._system_info = None @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from the NikoHomeControl API.""" _LOGGER.debug("Fetching async state in bulk") try: self.data = await self.hass.async_add_executor_job( self._nhc.list_actions_raw ) self.available = True except OSError as ex: _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) self.available = False def get_state(self, aid): """Find and filter state based on action id.""" for state in self.data: if state["id"] == aid: return state["value1"] != 0 _LOGGER.error("Failed to retrieve state off unknown light")
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/niko_home_control/light.py
"""Component that will help set the Microsoft face for verify processing.""" import logging import voluptuous as vol from homeassistant.components.image_processing import ( ATTR_CONFIDENCE, CONF_CONFIDENCE, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA, ImageProcessingFaceEntity, ) from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE from homeassistant.const import ATTR_NAME from homeassistant.core import split_entity_id from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_GROUP = "group" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_GROUP): cv.slugify}) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Microsoft Face identify platform.""" api = hass.data[DATA_MICROSOFT_FACE] face_group = config[CONF_GROUP] confidence = config[CONF_CONFIDENCE] entities = [] for camera in config[CONF_SOURCE]: entities.append( MicrosoftFaceIdentifyEntity( camera[CONF_ENTITY_ID], api, face_group, confidence, camera.get(CONF_NAME), ) ) async_add_entities(entities) class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity): """Representation of the Microsoft Face API entity for identify.""" def __init__(self, camera_entity, api, face_group, confidence, name=None): """Initialize the Microsoft Face API.""" super().__init__() self._api = api self._camera = camera_entity self._confidence = confidence self._face_group = face_group if name: self._name = name else: self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}" @property def confidence(self): """Return minimum confidence for send events.""" return self._confidence @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera @property def name(self): """Return the name of the entity.""" return self._name async def async_process_image(self, image): """Process image. This method is a coroutine. """ detect = [] try: face_data = await self._api.call_api("post", "detect", image, binary=True) if face_data: face_ids = [data["faceId"] for data in face_data] detect = await self._api.call_api( "post", "identify", {"faceIds": face_ids, "personGroupId": self._face_group}, ) except HomeAssistantError as err: _LOGGER.error("Can't process image on Microsoft face: %s", err) return # Parse data known_faces = [] total = 0 for face in detect: total += 1 if not face["candidates"]: continue data = face["candidates"][0] name = "" for s_name, s_id in self._api.store[self._face_group].items(): if data["personId"] == s_id: name = s_name break known_faces.append( {ATTR_NAME: name, ATTR_CONFIDENCE: data["confidence"] * 100} ) self.async_process_faces(known_faces, total)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/microsoft_face_identify/image_processing.py
"""Support for Huawei LTE sensors.""" import logging import re from typing import Optional import attr from homeassistant.components.sensor import ( DEVICE_CLASS_SIGNAL_STRENGTH, DOMAIN as SENSOR_DOMAIN, ) from homeassistant.const import CONF_URL, DATA_BYTES, STATE_UNKNOWN, TIME_SECONDS from . import HuaweiLteBaseEntity from .const import ( DOMAIN, KEY_DEVICE_INFORMATION, KEY_DEVICE_SIGNAL, KEY_MONITORING_MONTH_STATISTICS, KEY_MONITORING_STATUS, KEY_MONITORING_TRAFFIC_STATISTICS, KEY_NET_CURRENT_PLMN, KEY_NET_NET_MODE, KEY_SMS_SMS_COUNT, SENSOR_KEYS, ) _LOGGER = logging.getLogger(__name__) SENSOR_META = { KEY_DEVICE_INFORMATION: dict( include=re.compile(r"^WanIP.*Address$", re.IGNORECASE) ), (KEY_DEVICE_INFORMATION, "WanIPAddress"): dict( name="WAN IP address", icon="mdi:ip", enabled_default=True ), (KEY_DEVICE_INFORMATION, "WanIPv6Address"): dict( name="WAN IPv6 address", icon="mdi:ip" ), (KEY_DEVICE_SIGNAL, "band"): dict(name="Band"), (KEY_DEVICE_SIGNAL, "cell_id"): dict(name="Cell ID"), (KEY_DEVICE_SIGNAL, "lac"): dict(name="LAC", icon="mdi:map-marker"), (KEY_DEVICE_SIGNAL, "mode"): dict( name="Mode", formatter=lambda x: ({"0": "2G", "2": "3G", "7": "4G"}.get(x, "Unknown"), None), ), (KEY_DEVICE_SIGNAL, "pci"): dict(name="PCI"), (KEY_DEVICE_SIGNAL, "rsrq"): dict( name="RSRQ", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrq.php icon=lambda x: (x is None or x < -11) and "mdi:signal-cellular-outline" or x < -8 and "mdi:signal-cellular-1" or x < -5 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rsrp"): dict( name="RSRP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/rsrp.php icon=lambda x: (x is None or x < -110) and "mdi:signal-cellular-outline" or x < -95 and "mdi:signal-cellular-1" or x < -80 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rssi"): dict( name="RSSI", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://eyesaas.com/wi-fi-signal-strength/ icon=lambda x: (x is None or x < -80) and "mdi:signal-cellular-outline" or x < -70 and "mdi:signal-cellular-1" or x < -60 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "sinr"): dict( name="SINR", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # http://www.lte-anbieter.info/technik/sinr.php icon=lambda x: (x is None or x < 0) and "mdi:signal-cellular-outline" or x < 5 and "mdi:signal-cellular-1" or x < 10 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", enabled_default=True, ), (KEY_DEVICE_SIGNAL, "rscp"): dict( name="RSCP", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/RSCP icon=lambda x: (x is None or x < -95) and "mdi:signal-cellular-outline" or x < -85 and "mdi:signal-cellular-1" or x < -75 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), (KEY_DEVICE_SIGNAL, "ecio"): dict( name="EC/IO", device_class=DEVICE_CLASS_SIGNAL_STRENGTH, # https://wiki.teltonika.lt/view/EC/IO icon=lambda x: (x is None or x < -20) and "mdi:signal-cellular-outline" or x < -10 and "mdi:signal-cellular-1" or x < -6 and "mdi:signal-cellular-2" or "mdi:signal-cellular-3", ), KEY_MONITORING_MONTH_STATISTICS: dict( exclude=re.compile(r"^month(duration|lastcleartime)$", re.IGNORECASE) ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthDownload"): dict( name="Current month download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthUpload"): dict( 
name="Current month upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_MONITORING_STATUS: dict( include=re.compile( r"^(currentwifiuser|(primary|secondary).*dns)$", re.IGNORECASE ) ), (KEY_MONITORING_STATUS, "CurrentWifiUser"): dict( name="WiFi clients connected", icon="mdi:wifi" ), (KEY_MONITORING_STATUS, "PrimaryDns"): dict( name="Primary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryDns"): dict( name="Secondary DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "PrimaryIPv6Dns"): dict( name="Primary IPv6 DNS server", icon="mdi:ip" ), (KEY_MONITORING_STATUS, "SecondaryIPv6Dns"): dict( name="Secondary IPv6 DNS server", icon="mdi:ip" ), KEY_MONITORING_TRAFFIC_STATISTICS: dict( exclude=re.compile(r"^showtraffic$", re.IGNORECASE) ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentConnectTime"): dict( name="Current connection duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentDownload"): dict( name="Current connection download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentUpload"): dict( name="Current connection upload", unit=DATA_BYTES, icon="mdi:upload" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalConnectTime"): dict( name="Total connected duration", unit=TIME_SECONDS, icon="mdi:timer-outline" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalDownload"): dict( name="Total download", unit=DATA_BYTES, icon="mdi:download" ), (KEY_MONITORING_TRAFFIC_STATISTICS, "TotalUpload"): dict( name="Total upload", unit=DATA_BYTES, icon="mdi:upload" ), KEY_NET_CURRENT_PLMN: dict(exclude=re.compile(r"^(Rat|ShortName)$", re.IGNORECASE)), (KEY_NET_CURRENT_PLMN, "State"): dict( name="Operator search mode", formatter=lambda x: ({"0": "Auto", "1": "Manual"}.get(x, "Unknown"), None), ), (KEY_NET_CURRENT_PLMN, "FullName"): dict(name="Operator name",), (KEY_NET_CURRENT_PLMN, "Numeric"): dict(name="Operator code",), KEY_NET_NET_MODE: dict(include=re.compile(r"^NetworkMode$", re.IGNORECASE)), (KEY_NET_NET_MODE, "NetworkMode"): dict( name="Preferred mode", formatter=lambda x: ( { "00": "4G/3G/2G", "01": "2G", "02": "3G", "03": "4G", "0301": "4G/2G", "0302": "4G/3G", "0201": "3G/2G", }.get(x, "Unknown"), None, ), ), (KEY_SMS_SMS_COUNT, "LocalUnread"): dict( name="SMS unread", icon="mdi:email-receive", ), } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up from config entry.""" router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]] sensors = [] for key in SENSOR_KEYS: items = router.data.get(key) if not items: continue key_meta = SENSOR_META.get(key) if key_meta: include = key_meta.get("include") if include: items = filter(include.search, items) exclude = key_meta.get("exclude") if exclude: items = [x for x in items if not exclude.search(x)] for item in items: sensors.append( HuaweiLteSensor(router, key, item, SENSOR_META.get((key, item), {})) ) async_add_entities(sensors, True) def format_default(value): """Format value.""" unit = None if value is not None: # Clean up value and infer unit, e.g. 
-71dBm, 15 dB match = re.match( r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$", str(value) ) if match: try: value = float(match.group("value")) unit = match.group("unit") except ValueError: pass return value, unit @attr.s class HuaweiLteSensor(HuaweiLteBaseEntity): """Huawei LTE sensor entity.""" key: str = attr.ib() item: str = attr.ib() meta: dict = attr.ib() _state = attr.ib(init=False, default=STATE_UNKNOWN) _unit: str = attr.ib(init=False) async def async_added_to_hass(self): """Subscribe to needed data on add.""" await super().async_added_to_hass() self.router.subscriptions[self.key].add(f"{SENSOR_DOMAIN}/{self.item}") async def async_will_remove_from_hass(self): """Unsubscribe from needed data on remove.""" await super().async_will_remove_from_hass() self.router.subscriptions[self.key].remove(f"{SENSOR_DOMAIN}/{self.item}") @property def _entity_name(self) -> str: return self.meta.get("name", self.item) @property def _device_unique_id(self) -> str: return f"{self.key}.{self.item}" @property def state(self): """Return sensor state.""" return self._state @property def device_class(self) -> Optional[str]: """Return sensor device class.""" return self.meta.get("device_class") @property def unit_of_measurement(self): """Return sensor's unit of measurement.""" return self.meta.get("unit", self._unit) @property def icon(self): """Return icon for sensor.""" icon = self.meta.get("icon") if callable(icon): return icon(self.state) return icon @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return bool(self.meta.get("enabled_default")) async def async_update(self): """Update state.""" try: value = self.router.data[self.key][self.item] except KeyError: _LOGGER.debug("%s[%s] not in data", self.key, self.item) self._available = False return self._available = True formatter = self.meta.get("formatter") if not callable(formatter): formatter = format_default self._state, self._unit = formatter(value) async def async_setup_platform(*args, **kwargs): """Old no longer used way to set up Huawei LTE sensors.""" _LOGGER.warning( "Loading and configuring as a platform is no longer supported or " "required, convert to enabling/disabling available entities" )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/huawei_lte/sensor.py
"""Support for Yamaha MusicCast Receivers.""" import logging import socket import pymusiccast import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, ) from homeassistant.const import ( CONF_HOST, CONF_PORT, STATE_IDLE, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, ) import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) SUPPORTED_FEATURES = ( SUPPORT_PLAY | SUPPORT_PAUSE | SUPPORT_STOP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_SELECT_SOURCE ) KNOWN_HOSTS_KEY = "data_yamaha_musiccast" INTERVAL_SECONDS = "interval_seconds" DEFAULT_PORT = 5005 DEFAULT_INTERVAL = 480 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(INTERVAL_SECONDS, default=DEFAULT_INTERVAL): cv.positive_int, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Yamaha MusicCast platform.""" known_hosts = hass.data.get(KNOWN_HOSTS_KEY) if known_hosts is None: known_hosts = hass.data[KNOWN_HOSTS_KEY] = [] _LOGGER.debug("known_hosts: %s", known_hosts) host = config.get(CONF_HOST) port = config.get(CONF_PORT) interval = config.get(INTERVAL_SECONDS) # Get IP of host to prevent duplicates try: ipaddr = socket.gethostbyname(host) except (OSError) as error: _LOGGER.error("Could not communicate with %s:%d: %s", host, port, error) return if [item for item in known_hosts if item[0] == ipaddr]: _LOGGER.warning("Host %s:%d already registered", host, port) return if [item for item in known_hosts if item[1] == port]: _LOGGER.warning("Port %s:%d already registered", host, port) return reg_host = (ipaddr, port) known_hosts.append(reg_host) try: receiver = pymusiccast.McDevice(ipaddr, udp_port=port, mc_interval=interval) except pymusiccast.exceptions.YMCInitError as err: _LOGGER.error(err) receiver = None if receiver: for zone in receiver.zones: _LOGGER.debug("Receiver: %s / Port: %d / Zone: %s", receiver, port, zone) add_entities([YamahaDevice(receiver, receiver.zones[zone])], True) else: known_hosts.remove(reg_host) class YamahaDevice(MediaPlayerEntity): """Representation of a Yamaha MusicCast device.""" def __init__(self, recv, zone): """Initialize the Yamaha MusicCast device.""" self._recv = recv self._name = recv.name self._source = None self._source_list = [] self._zone = zone self.mute = False self.media_status = None self.media_status_received = None self.power = STATE_UNKNOWN self.status = STATE_UNKNOWN self.volume = 0 self.volume_max = 0 self._recv.set_yamaha_device(self) self._zone.set_yamaha_device(self) @property def name(self): """Return the name of the device.""" return f"{self._name} ({self._zone.zone_id})" @property def state(self): """Return the state of the device.""" if self.power == STATE_ON and self.status != STATE_UNKNOWN: return self.status return self.power @property def should_poll(self): """Push an update after each command.""" return True @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self.mute @property def volume_level(self): """Volume level of the media 
player (0..1).""" return self.volume @property def supported_features(self): """Flag of features that are supported.""" return SUPPORTED_FEATURES @property def source(self): """Return the current input source.""" return self._source @property def source_list(self): """List of available input sources.""" return self._source_list @source_list.setter def source_list(self, value): """Set source_list attribute.""" self._source_list = value @property def media_content_type(self): """Return the media content type.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" return self.media_status.media_duration if self.media_status else None @property def media_image_url(self): """Image url of current playing media.""" return self.media_status.media_image_url if self.media_status else None @property def media_artist(self): """Artist of current playing media, music track only.""" return self.media_status.media_artist if self.media_status else None @property def media_album(self): """Album of current playing media, music track only.""" return self.media_status.media_album if self.media_status else None @property def media_track(self): """Track number of current playing media, music track only.""" return self.media_status.media_track if self.media_status else None @property def media_title(self): """Title of current playing media.""" return self.media_status.media_title if self.media_status else None @property def media_position(self): """Position of current playing media in seconds.""" if self.media_status and self.state in [ STATE_PLAYING, STATE_PAUSED, STATE_IDLE, ]: return self.media_status.media_position @property def media_position_updated_at(self): """When was the position of the current playing media valid. Returns value from homeassistant.util.dt.utcnow(). 
""" return self.media_status_received if self.media_status else None def update(self): """Get the latest details from the device.""" _LOGGER.debug("update: %s", self.entity_id) self._recv.update_status() self._zone.update_status() def update_hass(self): """Push updates to Home Assistant.""" if self.entity_id: _LOGGER.debug("update_hass: pushing updates") self.schedule_update_ha_state() return True def turn_on(self): """Turn on specified media player or all.""" _LOGGER.debug("Turn device: on") self._zone.set_power(True) def turn_off(self): """Turn off specified media player or all.""" _LOGGER.debug("Turn device: off") self._zone.set_power(False) def media_play(self): """Send the media player the command for play/pause.""" _LOGGER.debug("Play") self._recv.set_playback("play") def media_pause(self): """Send the media player the command for pause.""" _LOGGER.debug("Pause") self._recv.set_playback("pause") def media_stop(self): """Send the media player the stop command.""" _LOGGER.debug("Stop") self._recv.set_playback("stop") def media_previous_track(self): """Send the media player the command for prev track.""" _LOGGER.debug("Previous") self._recv.set_playback("previous") def media_next_track(self): """Send the media player the command for next track.""" _LOGGER.debug("Next") self._recv.set_playback("next") def mute_volume(self, mute): """Send mute command.""" _LOGGER.debug("Mute volume: %s", mute) self._zone.set_mute(mute) def set_volume_level(self, volume): """Set volume level, range 0..1.""" _LOGGER.debug("Volume level: %.2f / %d", volume, volume * self.volume_max) self._zone.set_volume(volume * self.volume_max) def select_source(self, source): """Send the media player the command to select input source.""" _LOGGER.debug("select_source: %s", source) self.status = STATE_UNKNOWN self._zone.set_input(source) def new_media_status(self, status): """Handle updates of the media status.""" _LOGGER.debug("new media_status arrived") self.media_status = status self.media_status_received = dt_util.utcnow()
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/yamaha_musiccast/media_player.py
"""Support for Ubee router.""" import logging from pyubee import Ubee import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_MODEL = "model" DEFAULT_MODEL = "detect" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_MODEL, default=DEFAULT_MODEL): vol.Any( "EVW32C-0N", "EVW320B", "EVW321B", "EVW3200-Wifi", "EVW3226@UPC", "DVW32CB", "DDW36C", ), } ) def get_scanner(hass, config): """Validate the configuration and return a Ubee scanner.""" info = config[DOMAIN] host = info[CONF_HOST] username = info[CONF_USERNAME] password = info[CONF_PASSWORD] model = info[CONF_MODEL] ubee = Ubee(host, username, password, model) if not ubee.login(): _LOGGER.error("Login failed") return None scanner = UbeeDeviceScanner(ubee) return scanner class UbeeDeviceScanner(DeviceScanner): """This class queries a wireless Ubee router.""" def __init__(self, ubee): """Initialize the Ubee scanner.""" self._ubee = ubee self._mac2name = {} def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" devices = self._get_connected_devices() self._mac2name = devices return list(devices) def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" return self._mac2name.get(device) def _get_connected_devices(self): """List connected devices with pyubee.""" if not self._ubee.session_active(): self._ubee.login() return self._ubee.get_connected_devices()
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/ubee/device_tracker.py
"""Component to interface with switches that can be controlled remotely.""" from datetime import timedelta import logging import voluptuous as vol from homeassistant.const import ( SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, ) from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import ToggleEntity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.loader import bind_hass # mypy: allow-untyped-defs, no-check-untyped-defs DOMAIN = "switch" SCAN_INTERVAL = timedelta(seconds=30) ENTITY_ID_FORMAT = DOMAIN + ".{}" ATTR_TODAY_ENERGY_KWH = "today_energy_kwh" ATTR_CURRENT_POWER_W = "current_power_w" MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) PROP_TO_ATTR = { "current_power_w": ATTR_CURRENT_POWER_W, "today_energy_kwh": ATTR_TODAY_ENERGY_KWH, } DEVICE_CLASS_OUTLET = "outlet" DEVICE_CLASS_SWITCH = "switch" DEVICE_CLASSES = [DEVICE_CLASS_OUTLET, DEVICE_CLASS_SWITCH] DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) _LOGGER = logging.getLogger(__name__) @bind_hass def is_on(hass, entity_id): """Return if the switch is on based on the statemachine. Async friendly. """ return hass.states.is_state(entity_id, STATE_ON) async def async_setup(hass, config): """Track states and offer events for switches.""" component = hass.data[DOMAIN] = EntityComponent( _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) await component.async_setup(config) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) class SwitchEntity(ToggleEntity): """Representation of a switch.""" @property def current_power_w(self): """Return the current power usage in W.""" return None @property def today_energy_kwh(self): """Return the today total energy usage in kWh.""" return None @property def is_standby(self): """Return true if device is in standby.""" return None @property def state_attributes(self): """Return the optional state attributes.""" data = {} for prop, attr in PROP_TO_ATTR.items(): value = getattr(self, prop) if value is not None: data[attr] = value return data @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return None class SwitchDevice(SwitchEntity): """Representation of a switch (for backwards compatibility).""" def __init_subclass__(cls, **kwargs): """Print deprecation warning.""" super().__init_subclass__(**kwargs) _LOGGER.warning( "SwitchDevice is deprecated, modify %s to extend SwitchEntity", cls.__name__, )
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/switch/__init__.py
"""Support for Lutron Caseta shades.""" import logging from homeassistant.components.cover import ( ATTR_POSITION, DOMAIN, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, CoverEntity, ) from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Lutron Caseta cover platform. Adds shades from the Caseta bridge associated with the config_entry as cover entities. """ entities = [] bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id] cover_devices = bridge.get_devices_by_domain(DOMAIN) for cover_device in cover_devices: entity = LutronCasetaCover(cover_device, bridge) entities.append(entity) async_add_entities(entities, True) class LutronCasetaCover(LutronCasetaDevice, CoverEntity): """Representation of a Lutron shade.""" @property def supported_features(self): """Flag supported features.""" return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION @property def is_closed(self): """Return if the cover is closed.""" return self._device["current_state"] < 1 @property def current_cover_position(self): """Return the current position of cover.""" return self._device["current_state"] async def async_close_cover(self, **kwargs): """Close the cover.""" self._smartbridge.set_value(self.device_id, 0) async def async_open_cover(self, **kwargs): """Open the cover.""" self._smartbridge.set_value(self.device_id, 100) async def async_set_cover_position(self, **kwargs): """Move the shade to a specific position.""" if ATTR_POSITION in kwargs: position = kwargs[ATTR_POSITION] self._smartbridge.set_value(self.device_id, position) async def async_update(self): """Call when forcing a refresh of the device.""" self._device = self._smartbridge.get_device_by_id(self.device_id) _LOGGER.debug(self._device)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/lutron_caseta/cover.py
"""Support for (EMEA/EU-based) Honeywell TCC climate systems. Such systems include evohome, Round Thermostat, and others. """ from datetime import datetime as dt, timedelta import logging import re from typing import Any, Dict, Optional, Tuple import aiohttp.client_exceptions import evohomeasync import evohomeasync2 import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME, HTTP_SERVICE_UNAVAILABLE, HTTP_TOO_MANY_REQUESTS, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util from .const import DOMAIN, EVO_FOLLOW, GWS, STORAGE_KEY, STORAGE_VER, TCS, UTC_OFFSET _LOGGER = logging.getLogger(__name__) ACCESS_TOKEN = "access_token" ACCESS_TOKEN_EXPIRES = "access_token_expires" REFRESH_TOKEN = "refresh_token" USER_DATA = "user_data" CONF_LOCATION_IDX = "location_idx" SCAN_INTERVAL_DEFAULT = timedelta(seconds=300) SCAN_INTERVAL_MINIMUM = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_LOCATION_IDX, default=0): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=SCAN_INTERVAL_DEFAULT ): vol.All(cv.time_period, vol.Range(min=SCAN_INTERVAL_MINIMUM)), } ) }, extra=vol.ALLOW_EXTRA, ) ATTR_SYSTEM_MODE = "mode" ATTR_DURATION_DAYS = "period" ATTR_DURATION_HOURS = "duration" ATTR_ZONE_TEMP = "setpoint" ATTR_DURATION_UNTIL = "duration" SVC_REFRESH_SYSTEM = "refresh_system" SVC_SET_SYSTEM_MODE = "set_system_mode" SVC_RESET_SYSTEM = "reset_system" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SVC_RESET_ZONE_OVERRIDE = "clear_zone_override" RESET_ZONE_OVERRIDE_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_TEMP): vol.All( vol.Coerce(float), vol.Range(min=4.0, max=35.0) ), vol.Optional(ATTR_DURATION_UNTIL): vol.All( cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1)) ), } ) # system mode schemas are built dynamically, below def _dt_local_to_aware(dt_naive: dt) -> dt: dt_aware = dt_util.now() + (dt_naive - dt.now()) if dt_aware.microsecond >= 500000: dt_aware += timedelta(seconds=1) return dt_aware.replace(microsecond=0) def _dt_aware_to_naive(dt_aware: dt) -> dt: dt_naive = dt.now() + (dt_aware - dt_util.now()) if dt_naive.microsecond >= 500000: dt_naive += timedelta(seconds=1) return dt_naive.replace(microsecond=0) def convert_until(status_dict: dict, until_key: str) -> None: """Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat.""" if until_key in status_dict: # only present for certain modes dt_utc_naive = dt_util.parse_datetime(status_dict[until_key]) status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat() def convert_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: """Recursively convert a dict's keys to snake_case.""" def convert_key(key: str) -> str: """Convert a string to snake_case.""" string = re.sub(r"[\-\.\s]", "_", str(key)) 
return (string[0]).lower() + re.sub( r"[A-Z]", lambda matched: f"_{matched.group(0).lower()}", string[1:] ) return { (convert_key(k) if isinstance(k, str) else k): ( convert_dict(v) if isinstance(v, dict) else v ) for k, v in dictionary.items() } def _handle_exception(err) -> bool: """Return False if the exception can't be ignored.""" try: raise err except evohomeasync2.AuthenticationError: _LOGGER.error( "Failed to authenticate with the vendor's server. " "Check your network and the vendor's service status page. " "Also check that your username and password are correct. " "Message is: %s", err, ) return False except aiohttp.ClientConnectionError: # this appears to be a common occurrence with the vendor's servers _LOGGER.warning( "Unable to connect with the vendor's server. " "Check your network and the vendor's service status page. " "Message is: %s", err, ) return False except aiohttp.ClientResponseError: if err.status == HTTP_SERVICE_UNAVAILABLE: _LOGGER.warning( "The vendor says their server is currently unavailable. " "Check the vendor's service status page" ) return False if err.status == HTTP_TOO_MANY_REQUESTS: _LOGGER.warning( "The vendor's API rate limit has been exceeded. " "If this message persists, consider increasing the %s", CONF_SCAN_INTERVAL, ) return False raise # we don't expect/handle any other Exceptions async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" async def load_auth_tokens(store) -> Tuple[Dict, Optional[Dict]]: app_storage = await store.async_load() tokens = dict(app_storage if app_storage else {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be corrupt await store.async_save({}) return ({}, None) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None: tokens[ACCESS_TOKEN_EXPIRES] = _dt_aware_to_naive( dt_util.parse_datetime(tokens[ACCESS_TOKEN_EXPIRES]) ) user_data = tokens.pop(USER_DATA, None) return (tokens, user_data) store = hass.helpers.storage.Store(STORAGE_VER, STORAGE_KEY) tokens, user_data = await load_auth_tokens(store) client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], **tokens, session=async_get_clientsession(hass), ) try: await client_v2.login() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) return False finally: config[DOMAIN][CONF_PASSWORD] = "REDACTED" loc_idx = config[DOMAIN][CONF_LOCATION_IDX] try: loc_config = client_v2.installation_info[loc_idx] except IndexError: _LOGGER.error( "Config error: '%s' = %s, but the valid range is 0-%s. " "Unable to continue. 
Fix any configuration errors and restart HA", CONF_LOCATION_IDX, loc_idx, len(client_v2.installation_info) - 1, ) return False if _LOGGER.isEnabledFor(logging.DEBUG): _config = {"locationInfo": {"timeZone": None}, GWS: [{TCS: None}]} _config["locationInfo"]["timeZone"] = loc_config["locationInfo"]["timeZone"] _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, user_data=user_data, session=async_get_clientsession(hass), ) hass.data[DOMAIN] = {} hass.data[DOMAIN]["broker"] = broker = EvoBroker( hass, client_v2, client_v1, store, config[DOMAIN] ) await broker.save_auth_tokens() await broker.async_update() # get initial state hass.async_create_task(async_load_platform(hass, "climate", DOMAIN, {}, config)) if broker.tcs.hotwater: hass.async_create_task( async_load_platform(hass, "water_heater", DOMAIN, {}, config) ) hass.helpers.event.async_track_time_interval( broker.async_update, config[DOMAIN][CONF_SCAN_INTERVAL] ) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes. """ @verify_domain_control(hass, DOMAIN) async def force_refresh(call) -> None: """Obtain the latest state data via the vendor's RESTful API.""" await broker.async_update() @verify_domain_control(hass, DOMAIN) async def set_system_mode(call) -> None: """Set the system mode.""" payload = { "unique_id": broker.tcs.systemId, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) @verify_domain_control(hass, DOMAIN) async def set_zone_override(call) -> None: """Set the zone override (setpoint).""" entity_id = call.data[ATTR_ENTITY_ID] registry = await hass.helpers.entity_registry.async_get_registry() registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register(DOMAIN, SVC_REFRESH_SYSTEM, force_refresh) # Enumerate which operating modes are supported by this system modes = broker.config["allowedSystemModes"] # Not all systems support "AutoWithReset": register this handler only if required if [m["systemMode"] for m in modes if m["systemMode"] == "AutoWithReset"]: hass.services.async_register(DOMAIN, SVC_RESET_SYSTEM, set_system_mode) system_mode_schemas = [] modes = [m for m in modes if m["systemMode"] != "AutoWithReset"] # Permanent-only modes will use this schema perm_modes = [m["systemMode"] for m in modes if not m["canBeTemporary"]] if perm_modes: # any of: "Auto", "HeatingOff": permanent only schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)}) system_mode_schemas.append(schema) modes = [m for m in modes if m["canBeTemporary"]] # These modes are set for a number of hours (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if 
m["timingMode"] == "Duration"] if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_HOURS): vol.All( cv.time_period, vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)), ), } ) system_mode_schemas.append(schema) # These modes are set for a number of days (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if m["timingMode"] == "Period"] if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_DAYS): vol.All( cv.time_period, vol.Range(min=timedelta(days=1), max=timedelta(days=99)), ), } ) system_mode_schemas.append(schema) if system_mode_schemas: hass.services.async_register( DOMAIN, SVC_SET_SYSTEM_MODE, set_system_mode, schema=vol.Any(*system_mode_schemas), ) # The zone modes are consistent across all systems and use the same schema hass.services.async_register( DOMAIN, SVC_RESET_ZONE_OVERRIDE, set_zone_override, schema=RESET_ZONE_OVERRIDE_SCHEMA, ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_override, schema=SET_ZONE_OVERRIDE_SCHEMA, ) class EvoBroker: """Container for evohome client and data.""" def __init__(self, hass, client, client_v1, store, params) -> None: """Initialize the evohome client and its data structure.""" self.hass = hass self.client = client self.client_v1 = client_v1 self._store = store self.params = params loc_idx = params[CONF_LOCATION_IDX] self.config = client.installation_info[loc_idx][GWS][0][TCS][0] self.tcs = client.locations[loc_idx]._gateways[0]._control_systems[0] self.tcs_utc_offset = timedelta( minutes=client.locations[loc_idx].timeZone[UTC_OFFSET] ) self.temps = {} async def save_auth_tokens(self) -> None: """Save access tokens and session IDs to the store for later use.""" # evohomeasync2 uses naive/local datetimes access_token_expires = _dt_local_to_aware(self.client.access_token_expires) app_storage = {CONF_USERNAME: self.client.username} app_storage[REFRESH_TOKEN] = self.client.refresh_token app_storage[ACCESS_TOKEN] = self.client.access_token app_storage[ACCESS_TOKEN_EXPIRES] = access_token_expires.isoformat() if self.client_v1 and self.client_v1.user_data: app_storage[USER_DATA] = { "userInfo": {"userID": self.client_v1.user_data["userInfo"]["userID"]}, "sessionId": self.client_v1.user_data["sessionId"], } else: app_storage[USER_DATA] = None await self._store.async_save(app_storage) async def call_client_api(self, api_function, refresh=True) -> Any: """Call a client API.""" try: result = await api_function except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: if not _handle_exception(err): return if refresh: self.hass.helpers.event.async_call_later(1, self.async_update()) return result async def _update_v1(self, *args, **kwargs) -> None: """Get the latest high-precision temperatures of the default Location.""" def get_session_id(client_v1) -> Optional[str]: user_data = client_v1.user_data if client_v1 else None return user_data.get("sessionId") if user_data else None session_id = get_session_id(self.client_v1) try: temps = list(await self.client_v1.temperatures(force_refresh=True)) except aiohttp.ClientError as err: _LOGGER.warning( "Unable to obtain the latest high-precision temperatures. " "Check your network and the vendor's service status page. " "Proceeding with low-precision temperatures. 
" "Message is: %s", err, ) self.temps = None # these are now stale, will fall back to v2 temps else: if ( str(self.client_v1.location_id) != self.client.locations[self.params[CONF_LOCATION_IDX]].locationId ): _LOGGER.warning( "The v2 API's configured location doesn't match " "the v1 API's default location (there is more than one location), " "so the high-precision feature will be disabled" ) self.client_v1 = self.temps = None else: self.temps = {str(i["id"]): i["temp"] for i in temps} _LOGGER.debug("Temperatures = %s", self.temps) if session_id != get_session_id(self.client_v1): await self.save_auth_tokens() async def _update_v2(self, *args, **kwargs) -> None: """Get the latest modes, temperatures, setpoints of a Location.""" access_token = self.client.access_token loc_idx = self.params[CONF_LOCATION_IDX] try: status = await self.client.locations[loc_idx].status() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) else: async_dispatcher_send(self.hass, DOMAIN) _LOGGER.debug("Status = %s", status) if access_token != self.client.access_token: await self.save_auth_tokens() async def async_update(self, *args, **kwargs) -> None: """Get the latest state data of an entire Honeywell TCC Location. This includes state data for a Controller and all its child devices, such as the operating mode of the Controller and the current temp of its children (e.g. Zones, DHW controller). """ await self._update_v2() if self.client_v1: await self._update_v1() # inform the evohome devices that state data has been updated async_dispatcher_send(self.hass, DOMAIN) class EvoDevice(Entity): """Base for any evohome device. This includes the Controller, (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize the evohome entity.""" self._evo_device = evo_device self._evo_broker = evo_broker self._evo_tcs = evo_broker.tcs self._unique_id = self._name = self._icon = self._precision = None self._supported_features = None self._device_state_attrs = {} async def async_refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] in [SVC_SET_ZONE_OVERRIDE, SVC_RESET_ZONE_OVERRIDE]: await self.async_zone_svc_request(payload["service"], payload["data"]) return await self.async_tcs_svc_request(payload["service"], payload["data"]) async def async_tcs_svc_request(self, service: dict, data: dict) -> None: """Process a service request (system mode) for a controller.""" raise NotImplementedError async def async_zone_svc_request(self, service: dict, data: dict) -> None: """Process a service request (setpoint override) for a zone.""" raise NotImplementedError @property def should_poll(self) -> bool: """Evohome entities should not be polled.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name of the evohome entity.""" return self._name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the evohome-specific state attributes.""" status = self._device_state_attrs if "systemModeStatus" in status: convert_until(status["systemModeStatus"], "timeUntil") if "setpointStatus" in status: convert_until(status["setpointStatus"], "until") if "stateStatus" in status: convert_until(status["stateStatus"], "until") return {"status": 
convert_dict(status)} @property def icon(self) -> str: """Return the icon to use in the frontend UI.""" return self._icon @property def supported_features(self) -> int: """Get the flag of supported features of the device.""" return self._supported_features async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" async_dispatcher_connect(self.hass, DOMAIN, self.async_refresh) @property def precision(self) -> float: """Return the temperature precision to use in the frontend UI.""" return self._precision @property def temperature_unit(self) -> str: """Return the temperature unit to use in the frontend UI.""" return TEMP_CELSIUS class EvoChild(EvoDevice): """Base for any evohome child. This includes (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize a evohome Controller (hub).""" super().__init__(evo_broker, evo_device) self._schedule = {} self._setpoints = {} @property def current_temperature(self) -> Optional[float]: """Return the current temperature of a Zone.""" if self._evo_broker.temps: if self._evo_broker.temps[self._evo_device.zoneId] != 128: return self._evo_broker.temps[self._evo_device.zoneId] if self._evo_device.temperatureStatus["isAvailable"]: return self._evo_device.temperatureStatus["temperature"] @property def setpoints(self) -> Dict[str, Any]: """Return the current/next setpoints from the schedule. Only Zones & DHW controllers (but not the TCS) can have schedules. """ def _dt_evo_to_aware(dt_naive: dt, utc_offset: timedelta) -> dt: dt_aware = dt_naive.replace(tzinfo=dt_util.UTC) - utc_offset return dt_util.as_local(dt_aware) if not self._schedule["DailySchedules"]: return {} # no schedule {'DailySchedules': []}, so no scheduled setpoints day_time = dt_util.now() day_of_week = int(day_time.strftime("%w")) # 0 is Sunday time_of_day = day_time.strftime("%H:%M:%S") try: # Iterate today's switchpoints until past the current time of day... day = self._schedule["DailySchedules"][day_of_week] sp_idx = -1 # last switchpoint of the day before for i, tmp in enumerate(day["Switchpoints"]): if time_of_day > tmp["TimeOfDay"]: sp_idx = i # current setpoint else: break # Did the current SP start yesterday? Does the next start SP tomorrow? 
this_sp_day = -1 if sp_idx == -1 else 0 next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0 for key, offset, idx in [ ("this", this_sp_day, sp_idx), ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)), ]: sp_date = (day_time + timedelta(days=offset)).strftime("%Y-%m-%d") day = self._schedule["DailySchedules"][(day_of_week + offset) % 7] switchpoint = day["Switchpoints"][idx] dt_aware = _dt_evo_to_aware( dt_util.parse_datetime(f"{sp_date}T{switchpoint['TimeOfDay']}"), self._evo_broker.tcs_utc_offset, ) self._setpoints[f"{key}_sp_from"] = dt_aware.isoformat() try: self._setpoints[f"{key}_sp_temp"] = switchpoint["heatSetpoint"] except KeyError: self._setpoints[f"{key}_sp_state"] = switchpoint["DhwState"] except IndexError: self._setpoints = {} _LOGGER.warning( "Failed to get setpoints, report as an issue if this error persists", exc_info=True, ) return self._setpoints async def _update_schedule(self) -> None: """Get the latest schedule, if any.""" if "DailySchedules" in self._schedule and not self._schedule["DailySchedules"]: if not self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: return # avoid unnecessary I/O - there's nothing to update self._schedule = await self._evo_broker.call_client_api( self._evo_device.schedule(), refresh=False ) _LOGGER.debug("Schedule['%s'] = %s", self.name, self._schedule) async def async_update(self) -> None: """Get the latest state data.""" next_sp_from = self._setpoints.get("next_sp_from", "2000-01-01T00:00:00+00:00") if dt_util.now() >= dt_util.parse_datetime(next_sp_from): await self._update_schedule() # no schedule, or it's out-of-date self._device_state_attrs = {"setpoints": self.setpoints}
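A quick check of convert_dict from this file: it recursively rewrites camelCase keys to snake_case while leaving values alone. The sample status payload below is made up for illustration.

status = {"systemModeStatus": {"mode": "Auto", "timeUntil": None}}
assert convert_dict(status) == {
    "system_mode_status": {"mode": "Auto", "time_until": None}
}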
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/evohome/__init__.py
"""Support for particulate matter sensors connected to a serial port.""" import logging from pmsensor import serial_pm as pm import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONF_NAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) CONF_BRAND = "brand" CONF_SERIAL_DEVICE = "serial_device" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_BRAND): cv.string, vol.Required(CONF_SERIAL_DEVICE): cv.string, vol.Optional(CONF_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available PM sensors.""" try: coll = pm.PMDataCollector( config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)] ) except KeyError: _LOGGER.error( "Brand %s not supported\n supported brands: %s", config.get(CONF_BRAND), pm.SUPPORTED_SENSORS.keys(), ) return except OSError as err: _LOGGER.error( "Could not open serial connection to %s (%s)", config.get(CONF_SERIAL_DEVICE), err, ) return dev = [] for pmname in coll.supported_values(): if config.get(CONF_NAME) is not None: name = "{} PM{}".format(config.get(CONF_NAME), pmname) else: name = f"PM{pmname}" dev.append(ParticulateMatterSensor(coll, name, pmname)) add_entities(dev) class ParticulateMatterSensor(Entity): """Representation of an Particulate matter sensor.""" def __init__(self, pmDataCollector, name, pmname): """Initialize a new PM sensor.""" self._name = name self._pmname = pmname self._state = None self._collector = pmDataCollector @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER def update(self): """Read from sensor and update the state.""" _LOGGER.debug("Reading data from PM sensor") try: self._state = self._collector.read_data()[self._pmname] except KeyError: _LOGGER.error("Could not read PM%s value", self._pmname)
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/serial_pm/sensor.py
"""Support for Luftdaten sensors.""" import logging from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from . import ( DATA_LUFTDATEN, DATA_LUFTDATEN_CLIENT, DEFAULT_ATTRIBUTION, DOMAIN, SENSORS, TOPIC_UPDATE, ) from .const import ATTR_SENSOR_ID _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up a Luftdaten sensor based on a config entry.""" luftdaten = hass.data[DOMAIN][DATA_LUFTDATEN_CLIENT][entry.entry_id] sensors = [] for sensor_type in luftdaten.sensor_conditions: try: name, icon, unit = SENSORS[sensor_type] except KeyError: _LOGGER.debug("Unknown sensor value type: %s", sensor_type) continue sensors.append( LuftdatenSensor( luftdaten, sensor_type, name, icon, unit, entry.data[CONF_SHOW_ON_MAP] ) ) async_add_entities(sensors, True) class LuftdatenSensor(Entity): """Implementation of a Luftdaten sensor.""" def __init__(self, luftdaten, sensor_type, name, icon, unit, show): """Initialize the Luftdaten sensor.""" self._async_unsub_dispatcher_connect = None self.luftdaten = luftdaten self._icon = icon self._name = name self._data = None self.sensor_type = sensor_type self._unit_of_measurement = unit self._show_on_map = show self._attrs = {} @property def icon(self): """Return the icon.""" return self._icon @property def state(self): """Return the state of the device.""" if self._data is not None: return self._data[self.sensor_type] @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def should_poll(self): """Disable polling.""" return False @property def unique_id(self) -> str: """Return a unique, friendly identifier for this entity.""" if self._data is not None: return f"{self._data['sensor_id']}_{self.sensor_type}" @property def device_state_attributes(self): """Return the state attributes.""" self._attrs[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION if self._data is not None: self._attrs[ATTR_SENSOR_ID] = self._data["sensor_id"] on_map = ATTR_LATITUDE, ATTR_LONGITUDE no_map = "lat", "long" lat_format, lon_format = on_map if self._show_on_map else no_map try: self._attrs[lon_format] = self._data["longitude"] self._attrs[lat_format] = self._data["latitude"] return self._attrs except KeyError: return async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.async_schedule_update_ha_state(True) self._async_unsub_dispatcher_connect = async_dispatcher_connect( self.hass, TOPIC_UPDATE, update ) async def async_will_remove_from_hass(self): """Disconnect dispatcher listener when removed.""" if self._async_unsub_dispatcher_connect: self._async_unsub_dispatcher_connect() async def async_update(self): """Get the latest data and update the state.""" try: self._data = self.luftdaten.data[DATA_LUFTDATEN] except KeyError: return
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/luftdaten/sensor.py
"""Support for LIRC devices.""" # pylint: disable=no-member, import-error import logging import threading import time import lirc import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP _LOGGER = logging.getLogger(__name__) BUTTON_NAME = "button_name" DOMAIN = "lirc" EVENT_IR_COMMAND_RECEIVED = "ir_command_received" ICON = "mdi:remote" CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Set up the LIRC capability.""" # blocking=True gives unexpected behavior (multiple responses for 1 press) # also by not blocking, we allow hass to shut down the thread gracefully # on exit. lirc.init("home-assistant", blocking=False) lirc_interface = LircInterface(hass) def _start_lirc(_event): lirc_interface.start() def _stop_lirc(_event): lirc_interface.stopped.set() hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_lirc) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_lirc) return True class LircInterface(threading.Thread): """ This interfaces with the lirc daemon to read IR commands. When using lirc in blocking mode, sometimes repeated commands get produced in the next read of a command so we use a thread here to just wait around until a non-empty response is obtained from lirc. """ def __init__(self, hass): """Construct a LIRC interface object.""" threading.Thread.__init__(self) self.daemon = True self.stopped = threading.Event() self.hass = hass def run(self): """Run the loop of the LIRC interface thread.""" _LOGGER.debug("LIRC interface thread started") while not self.stopped.isSet(): try: code = lirc.nextcode() # list; empty if no buttons pressed except lirc.NextCodeError: _LOGGER.warning("Error reading next code from LIRC") code = None # interpret result from python-lirc if code: code = code[0] _LOGGER.info("Got new LIRC code %s", code) self.hass.bus.fire(EVENT_IR_COMMAND_RECEIVED, {BUTTON_NAME: code}) else: time.sleep(0.2) lirc.deinit() _LOGGER.debug("LIRC interface thread stopped")
"""The tests for the hassio component.""" import os import pytest from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.components import frontend from homeassistant.components.hassio import STORAGE_KEY from homeassistant.setup import async_setup_component from tests.async_mock import patch MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"} @pytest.fixture(autouse=True) def mock_all(aioclient_mock): """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", json={ "result": "ok", "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}, }, ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ "result": "ok", "data": { "result": "ok", "data": { "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", }, }, }, ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) async def test_setup_api_ping(hass, aioclient_mock): """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result assert aioclient_mock.call_count == 6 assert hass.components.hassio.get_homeassistant_version() == "0.110.0" assert hass.components.hassio.is_hassio() async def test_setup_api_panel(hass, aioclient_mock): """Test setup with API ping.""" assert await async_setup_component(hass, "frontend", {}) with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result panels = hass.data[frontend.DATA_PANELS] assert panels.get("hassio").to_response() == { "component_name": "custom", "icon": "hass:home-assistant", "title": "Supervisor", "url_path": "hassio", "require_admin": True, "config": { "_panel_custom": { "embed_iframe": True, "js_url": "/api/hassio/app/entrypoint.js", "name": "hassio-main", "trust_external": False, } }, } async def test_setup_api_push_api_data(hass, aioclient_mock): """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}} ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_server_host(hass, aioclient_mock): """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component( hass, "hassio", {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}}, ) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 9999 assert not aioclient_mock.mock_calls[1][2]["watchdog"] async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 refresh_token = 
aioclient_mock.mock_calls[1][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) assert hassio_user is not None assert hassio_user.system_generated assert len(hassio_user.groups) == 1 assert hassio_user.groups[0].id == GROUP_ID_ADMIN for token in hassio_user.refresh_tokens.values(): if token.token == refresh_token: break else: assert False, "refresh token not found" async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" # Create user without admin user = await hass.auth.async_create_system_user("Hass.io") assert not user.is_admin await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = { "data": {"hassio_user": user.id}, "key": STORAGE_KEY, "version": 1, } with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert user.is_admin async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage): """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") token = await hass.auth.async_create_refresh_token(user) hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}} with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert not aioclient_mock.mock_calls[1][2]["ssl"] assert aioclient_mock.mock_calls[1][2]["port"] == 8123 assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token async def test_setup_core_push_timezone(hass, aioclient_mock): """Test setup with API push default data.""" hass.config.time_zone = "testzone" with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" await hass.config.async_update(time_zone="America/New_York") await hass.async_block_till_done() assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York" async def test_setup_hassio_no_additional_data(hass, aioclient_mock): """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON), patch.dict( os.environ, {"HASSIO_TOKEN": "123456"} ): result = await async_setup_component(hass, "hassio", {"hassio": {}}) assert result assert aioclient_mock.call_count == 6 assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456" async def test_fail_setup_without_environ_var(hass): """Fail setup if no environ variable set.""" with patch.dict(os.environ, {}, clear=True): result = await async_setup_component(hass, "hassio", {}) assert not result async def test_warn_when_cannot_connect(hass, caplog): """Fail warn when we cannot connect.""" with patch.dict(os.environ, MOCK_ENVIRON), patch( "homeassistant.components.hassio.HassIO.is_connected", return_value=None, ): result = await async_setup_component(hass, "hassio", {}) assert result assert hass.components.hassio.is_hassio() assert "Not connected with Hass.io / system too busy!" 
in caplog.text async def test_service_register(hassio_env, hass): """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") assert hass.services.has_service("hassio", "addon_stop") assert hass.services.has_service("hassio", "addon_restart") assert hass.services.has_service("hassio", "addon_stdin") assert hass.services.has_service("hassio", "host_shutdown") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "host_reboot") assert hass.services.has_service("hassio", "snapshot_full") assert hass.services.has_service("hassio", "snapshot_partial") assert hass.services.has_service("hassio", "restore_full") assert hass.services.has_service("hassio", "restore_partial") async def test_service_calls(hassio_env, hass, aioclient_mock): """Call service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"}) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"} ) aioclient_mock.post( "http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"} ) await hass.services.async_call("hassio", "addon_start", {"addon": "test"}) await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() assert aioclient_mock.call_count == 7 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() assert aioclient_mock.call_count == 9 await hass.services.async_call("hassio", "snapshot_full", {}) await hass.services.async_call( "hassio", "snapshot_partial", {"addons": ["test"], "folders": ["ssl"], "password": "123456"}, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 11 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "password": "123456", } await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"}) await hass.services.async_call( "hassio", "restore_partial", { "snapshot": "test", "homeassistant": False, "addons": ["test"], "folders": ["ssl"], "password": "123456", }, ) await hass.async_block_till_done() assert aioclient_mock.call_count == 13 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], "homeassistant": False, "password": "123456", } async def test_service_calls_core(hassio_env, hass, aioclient_mock): """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "hassio", {}) aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": 
"ok"}) aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() assert aioclient_mock.call_count == 4 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None ) as mock_check_config: await hass.services.async_call("homeassistant", "restart") await hass.async_block_till_done() assert mock_check_config.called assert aioclient_mock.call_count == 5
pschmitt/home-assistant
tests/components/hassio/test_init.py
homeassistant/components/lirc/__init__.py
from warnings import catch_warnings

import numpy as np

from pandas.core.dtypes import generic as gt

import pandas as pd
import pandas._testing as tm


class TestABCClasses:
    tuples = [[1, 2, 2], ["red", "blue", "red"]]
    multi_index = pd.MultiIndex.from_arrays(tuples, names=("number", "color"))
    datetime_index = pd.to_datetime(["2000/1/1", "2010/1/1"])
    timedelta_index = pd.to_timedelta(np.arange(5), unit="s")
    period_index = pd.period_range("2000/1/1", "2010/1/1", freq="M")
    categorical = pd.Categorical([1, 2, 3], categories=[2, 3, 1])
    categorical_df = pd.DataFrame({"values": [1, 2, 3]}, index=categorical)
    df = pd.DataFrame({"names": ["a", "b", "c"]}, index=multi_index)
    sparse_array = pd.arrays.SparseArray(np.random.randn(10))
    datetime_array = pd.core.arrays.DatetimeArray(datetime_index)
    timedelta_array = pd.core.arrays.TimedeltaArray(timedelta_index)

    def test_abc_types(self):
        assert isinstance(pd.Index(["a", "b", "c"]), gt.ABCIndex)
        assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCInt64Index)
        assert isinstance(pd.UInt64Index([1, 2, 3]), gt.ABCUInt64Index)
        assert isinstance(pd.Float64Index([1, 2, 3]), gt.ABCFloat64Index)
        assert isinstance(self.multi_index, gt.ABCMultiIndex)
        assert isinstance(self.datetime_index, gt.ABCDatetimeIndex)
        assert isinstance(self.timedelta_index, gt.ABCTimedeltaIndex)
        assert isinstance(self.period_index, gt.ABCPeriodIndex)
        assert isinstance(self.categorical_df.index, gt.ABCCategoricalIndex)
        assert isinstance(pd.Index(["a", "b", "c"]), gt.ABCIndexClass)
        assert isinstance(pd.Int64Index([1, 2, 3]), gt.ABCIndexClass)
        assert isinstance(pd.Series([1, 2, 3]), gt.ABCSeries)
        assert isinstance(self.df, gt.ABCDataFrame)
        assert isinstance(self.sparse_array, gt.ABCExtensionArray)
        assert isinstance(self.categorical, gt.ABCCategorical)

        assert isinstance(self.datetime_array, gt.ABCDatetimeArray)
        assert not isinstance(self.datetime_index, gt.ABCDatetimeArray)

        assert isinstance(self.timedelta_array, gt.ABCTimedeltaArray)
        assert not isinstance(self.timedelta_index, gt.ABCTimedeltaArray)


def test_setattr_warnings():
    # GH7175 - GOTCHA: You can't use dot notation to add a column...
    d = {
        "one": pd.Series([1.0, 2.0, 3.0], index=["a", "b", "c"]),
        "two": pd.Series([1.0, 2.0, 3.0, 4.0], index=["a", "b", "c", "d"]),
    }
    df = pd.DataFrame(d)

    with catch_warnings(record=True) as w:
        # successfully add new column
        # this should not raise a warning
        df["three"] = df.two + 1
        assert len(w) == 0
        assert df.three.sum() > df.two.sum()

    with catch_warnings(record=True) as w:
        # successfully modify column in place
        # this should not raise a warning
        df.one += 1
        assert len(w) == 0
        assert df.one.iloc[0] == 2

    with catch_warnings(record=True) as w:
        # successfully add an attribute to a series
        # this should not raise a warning
        df.two.not_an_index = [1, 2]
        assert len(w) == 0

    with tm.assert_produces_warning(UserWarning):
        # warn when setting column to nonexistent name
        df.four = df.two + 2
        assert df.four.sum() > df.two.sum()
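For orientation, the ABC classes exercised above exist so pandas internals can type-check values without importing the concrete classes; a two-line sanity check of that behavior:

import pandas as pd
from pandas.core.dtypes import generic as gt

s = pd.Series([1, 2, 3])
assert isinstance(s, gt.ABCSeries)         # matches the concrete Series
assert not isinstance(s, gt.ABCDataFrame)  # and only the matching class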
import numpy as np
import pytest

from pandas import Categorical, Index, MultiIndex, NaT
import pandas._testing as tm


def test_index_equal_levels_mismatch():
    msg = """Index are different

Index levels are different
\\[left\\]:  1, Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: 2, MultiIndex\\(\\[\\('A', 1\\),
            \\('A', 2\\),
            \\('B', 3\\),
            \\('B', 4\\)\\],
           \\)"""

    idx1 = Index([1, 2, 3])
    idx2 = MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3), ("B", 4)])

    with pytest.raises(AssertionError, match=msg):
        tm.assert_index_equal(idx1, idx2, exact=False)


def test_index_equal_values_mismatch(check_exact):
    msg = """MultiIndex level \\[1\\] are different

MultiIndex level \\[1\\] values are different \\(25\\.0 %\\)
\\[left\\]:  Int64Index\\(\\[2, 2, 3, 4\\], dtype='int64'\\)
\\[right\\]: Int64Index\\(\\[1, 2, 3, 4\\], dtype='int64'\\)"""

    idx1 = MultiIndex.from_tuples([("A", 2), ("A", 2), ("B", 3), ("B", 4)])
    idx2 = MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3), ("B", 4)])

    with pytest.raises(AssertionError, match=msg):
        tm.assert_index_equal(idx1, idx2, check_exact=check_exact)


def test_index_equal_length_mismatch(check_exact):
    msg = """Index are different

Index length are different
\\[left\\]:  3, Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: 4, Int64Index\\(\\[1, 2, 3, 4\\], dtype='int64'\\)"""

    idx1 = Index([1, 2, 3])
    idx2 = Index([1, 2, 3, 4])

    with pytest.raises(AssertionError, match=msg):
        tm.assert_index_equal(idx1, idx2, check_exact=check_exact)


def test_index_equal_class_mismatch(check_exact):
    msg = """Index are different

Index classes are different
\\[left\\]:  Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: Float64Index\\(\\[1\\.0, 2\\.0, 3\\.0\\], dtype='float64'\\)"""

    idx1 = Index([1, 2, 3])
    idx2 = Index([1, 2, 3.0])

    with pytest.raises(AssertionError, match=msg):
        tm.assert_index_equal(idx1, idx2, exact=True, check_exact=check_exact)


def test_index_equal_values_close(check_exact):
    idx1 = Index([1, 2, 3.0])
    idx2 = Index([1, 2, 3.0000000001])

    if check_exact:
        msg = """Index are different

Index values are different \\(33\\.33333 %\\)
\\[left\\]:  Float64Index\\(\\[1.0, 2.0, 3.0], dtype='float64'\\)
\\[right\\]: Float64Index\\(\\[1.0, 2.0, 3.0000000001\\], dtype='float64'\\)"""

        with pytest.raises(AssertionError, match=msg):
            tm.assert_index_equal(idx1, idx2, check_exact=check_exact)
    else:
        tm.assert_index_equal(idx1, idx2, check_exact=check_exact)


def test_index_equal_values_less_close(check_exact, check_less_precise):
    idx1 = Index([1, 2, 3.0])
    idx2 = Index([1, 2, 3.0001])
    kwargs = dict(check_exact=check_exact, check_less_precise=check_less_precise)

    if check_exact or not check_less_precise:
        msg = """Index are different

Index values are different \\(33\\.33333 %\\)
\\[left\\]:  Float64Index\\(\\[1.0, 2.0, 3.0], dtype='float64'\\)
\\[right\\]: Float64Index\\(\\[1.0, 2.0, 3.0001\\], dtype='float64'\\)"""

        with pytest.raises(AssertionError, match=msg):
            tm.assert_index_equal(idx1, idx2, **kwargs)
    else:
        tm.assert_index_equal(idx1, idx2, **kwargs)


def test_index_equal_values_too_far(check_exact, check_less_precise):
    idx1 = Index([1, 2, 3])
    idx2 = Index([1, 2, 4])
    kwargs = dict(check_exact=check_exact, check_less_precise=check_less_precise)

    msg = """Index are different

Index values are different \\(33\\.33333 %\\)
\\[left\\]:  Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: Int64Index\\(\\[1, 2, 4\\], dtype='int64'\\)"""

    with pytest.raises(AssertionError, match=msg):
        tm.assert_index_equal(idx1, idx2, **kwargs)


def test_index_equal_level_values_mismatch(check_exact, check_less_precise):
    idx1 = MultiIndex.from_tuples([("A", 2), ("A", 2), ("B", 3), ("B", 4)])
    idx2 = MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3), ("B", 4)])
    kwargs = dict(check_exact=check_exact, check_less_precise=check_less_precise)

    msg = """MultiIndex level \\[1\\] are different

MultiIndex level \\[1\\] values are different \\(25\\.0 %\\)
\\[left\\]:  Int64Index\\(\\[2, 2, 3, 4\\], dtype='int64'\\)
\\[right\\]: Int64Index\\(\\[1, 2, 3, 4\\], dtype='int64'\\)"""

    with pytest.raises(AssertionError, match=msg):
        tm.assert_index_equal(idx1, idx2, **kwargs)


@pytest.mark.parametrize(
    "name1,name2",
    [(None, "x"), ("x", "x"), (np.nan, np.nan), (NaT, NaT), (np.nan, NaT)],
)
def test_index_equal_names(name1, name2):
    idx1 = Index([1, 2, 3], name=name1)
    idx2 = Index([1, 2, 3], name=name2)

    if name1 == name2 or name1 is name2:
        tm.assert_index_equal(idx1, idx2)
    else:
        name1 = "'x'" if name1 == "x" else name1
        name2 = "'x'" if name2 == "x" else name2
        msg = f"""Index are different

Attribute "names" are different
\\[left\\]:  \\[{name1}\\]
\\[right\\]: \\[{name2}\\]"""

        with pytest.raises(AssertionError, match=msg):
            tm.assert_index_equal(idx1, idx2)


def test_index_equal_category_mismatch(check_categorical):
    msg = """Index are different

Attribute "dtype" are different
\\[left\\]:  CategoricalDtype\\(categories=\\['a', 'b'\\], ordered=False\\)
\\[right\\]: CategoricalDtype\\(categories=\\['a', 'b', 'c'\\], \
ordered=False\\)"""

    idx1 = Index(Categorical(["a", "b"]))
    idx2 = Index(Categorical(["a", "b"], categories=["a", "b", "c"]))

    if check_categorical:
        with pytest.raises(AssertionError, match=msg):
            tm.assert_index_equal(idx1, idx2, check_categorical=check_categorical)
    else:
        tm.assert_index_equal(idx1, idx2, check_categorical=check_categorical)
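Since pytest's match= argument is treated as a regular expression, the expected messages above escape every bracket, dot, and parenthesis; a compact example of the same idiom without the escaping burden:

import pytest
from pandas import Index
import pandas._testing as tm

# A plain substring is a valid regex, so short fragments need no escaping.
with pytest.raises(AssertionError, match="Index length are different"):
    tm.assert_index_equal(Index([1, 2, 3]), Index([1, 2, 3, 4]))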
TomAugspurger/pandas
pandas/tests/util/test_assert_index_equal.py
pandas/tests/dtypes/test_generic.py
import pandas as pd
import pandas._testing as tm


class TestUnaryOps:
    def test_invert(self):
        a = pd.array([True, False, None], dtype="boolean")
        expected = pd.array([False, True, None], dtype="boolean")
        tm.assert_extension_array_equal(~a, expected)

        expected = pd.Series(expected, index=["a", "b", "c"], name="name")
        result = ~pd.Series(a, index=["a", "b", "c"], name="name")
        tm.assert_series_equal(result, expected)

        df = pd.DataFrame({"A": a, "B": [True, False, False]}, index=["a", "b", "c"])
        result = ~df
        expected = pd.DataFrame(
            {"A": expected, "B": [False, True, True]}, index=["a", "b", "c"]
        )
        tm.assert_frame_equal(result, expected)
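The invariant the test relies on, shown directly: under the nullable boolean dtype, inverting keeps pd.NA as pd.NA instead of raising or coercing to object:

import pandas as pd

a = pd.array([True, False, None], dtype="boolean")
print(~a)  # [False, True, <NA>] -- NA propagates through the unary op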
TomAugspurger/pandas
pandas/tests/util/test_assert_index_equal.py
pandas/tests/arrays/boolean/test_ops.py
from contextlib import contextmanager

from pandas.plotting._core import _get_plot_backend


def table(ax, data, rowLabels=None, colLabels=None, **kwargs):
    """
    Helper function to convert DataFrame and Series to matplotlib.table.

    Parameters
    ----------
    ax : Matplotlib axes object
    data : DataFrame or Series
        Data for table contents.
    **kwargs
        Keyword arguments to be passed to matplotlib.table.table.
        If `rowLabels` or `colLabels` is not specified, data index or column
        name will be used.

    Returns
    -------
    matplotlib table object
    """
    plot_backend = _get_plot_backend("matplotlib")
    # forward the labels to the backend instead of discarding them
    return plot_backend.table(
        ax=ax, data=data, rowLabels=rowLabels, colLabels=colLabels, **kwargs
    )


def register():
    """
    Register pandas formatters and converters with matplotlib.

    This function modifies the global ``matplotlib.units.registry``
    dictionary. pandas adds custom converters for

    * pd.Timestamp
    * pd.Period
    * np.datetime64
    * datetime.datetime
    * datetime.date
    * datetime.time

    See Also
    --------
    deregister_matplotlib_converters : Remove pandas formatters and converters.
    """
    plot_backend = _get_plot_backend("matplotlib")
    plot_backend.register()


def deregister():
    """
    Remove pandas formatters and converters.

    Removes the custom converters added by :func:`register`. This
    attempts to set the state of the registry back to the state before
    pandas registered its own units. Converters for pandas' own types
    like Timestamp and Period are removed completely. Converters for types
    pandas overwrites, like ``datetime.datetime``, are restored to their
    original value.

    See Also
    --------
    register_matplotlib_converters : Register pandas formatters and converters
        with matplotlib.
    """
    plot_backend = _get_plot_backend("matplotlib")
    plot_backend.deregister()


def scatter_matrix(
    frame,
    alpha=0.5,
    figsize=None,
    ax=None,
    grid=False,
    diagonal="hist",
    marker=".",
    density_kwds=None,
    hist_kwds=None,
    range_padding=0.05,
    **kwargs,
):
    """
    Draw a matrix of scatter plots.

    Parameters
    ----------
    frame : DataFrame
    alpha : float, optional
        Amount of transparency applied.
    figsize : (float,float), optional
        A tuple (width, height) in inches.
    ax : Matplotlib axis object, optional
    grid : bool, optional
        Setting this to True will show the grid.
    diagonal : {'hist', 'kde'}
        Pick between 'kde' and 'hist' for either Kernel Density Estimation or
        Histogram plot in the diagonal.
    marker : str, optional
        Matplotlib marker type, default '.'.
    density_kwds : keywords
        Keyword arguments to be passed to kernel density estimate plot.
    hist_kwds : keywords
        Keyword arguments to be passed to hist function.
    range_padding : float, default 0.05
        Relative extension of axis range in x and y with respect to
        (x_max - x_min) or (y_max - y_min).
    **kwargs
        Keyword arguments to be passed to scatter function.

    Returns
    -------
    numpy.ndarray
        A matrix of scatter plots.

    Examples
    --------

    .. plot::
        :context: close-figs

        >>> df = pd.DataFrame(np.random.randn(1000, 4), columns=['A','B','C','D'])
        >>> pd.plotting.scatter_matrix(df, alpha=0.2)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.scatter_matrix(
        frame=frame,
        alpha=alpha,
        figsize=figsize,
        ax=ax,
        grid=grid,
        diagonal=diagonal,
        marker=marker,
        density_kwds=density_kwds,
        hist_kwds=hist_kwds,
        range_padding=range_padding,
        **kwargs,
    )


def radviz(frame, class_column, ax=None, color=None, colormap=None, **kwds):
    """
    Plot a multidimensional dataset in 2D.

    Each Series in the DataFrame is represented as an evenly distributed
    slice on a circle. Each data point is rendered in the circle according to
    the value on each Series. Highly correlated `Series` in the `DataFrame`
    are placed closer on the unit circle.

    RadViz allows projecting an N-dimensional data set into a 2D space where
    the influence of each dimension can be interpreted as a balance between
    the influence of all dimensions.

    More info available at the `original article
    <https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.135.889>`_
    describing RadViz.

    Parameters
    ----------
    frame : `DataFrame`
        pandas object holding the data.
    class_column : str
        Column name containing the name of the data point category.
    ax : :class:`matplotlib.axes.Axes`, optional
        A plot instance to which to add the information.
    color : list[str] or tuple[str], optional
        Assign a color to each category. Example: ['blue', 'green'].
    colormap : str or :class:`matplotlib.colors.Colormap`, default None
        Colormap to select colors from. If string, load colormap with that
        name from matplotlib.
    **kwds
        Options to pass to matplotlib scatter plotting method.

    Returns
    -------
    :class:`matplotlib.axes.Axes`

    See Also
    --------
    plotting.andrews_curves : Plot clustering visualization.

    Examples
    --------

    .. plot::
        :context: close-figs

        >>> df = pd.DataFrame(
        ...     {
        ...         'SepalLength': [6.5, 7.7, 5.1, 5.8, 7.6, 5.0, 5.4, 4.6, 6.7, 4.6],
        ...         'SepalWidth': [3.0, 3.8, 3.8, 2.7, 3.0, 2.3, 3.0, 3.2, 3.3, 3.6],
        ...         'PetalLength': [5.5, 6.7, 1.9, 5.1, 6.6, 3.3, 4.5, 1.4, 5.7, 1.0],
        ...         'PetalWidth': [1.8, 2.2, 0.4, 1.9, 2.1, 1.0, 1.5, 0.2, 2.1, 0.2],
        ...         'Category': [
        ...             'virginica',
        ...             'virginica',
        ...             'setosa',
        ...             'virginica',
        ...             'virginica',
        ...             'versicolor',
        ...             'versicolor',
        ...             'setosa',
        ...             'virginica',
        ...             'setosa'
        ...         ]
        ...     }
        ... )
        >>> pd.plotting.radviz(df, 'Category')
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.radviz(
        frame=frame,
        class_column=class_column,
        ax=ax,
        color=color,
        colormap=colormap,
        **kwds,
    )


def andrews_curves(
    frame, class_column, ax=None, samples=200, color=None, colormap=None, **kwargs
):
    """
    Generate a matplotlib plot of Andrews curves, for visualising clusters of
    multivariate data.

    Andrews curves have the functional form:

    f(t) = x_1/sqrt(2) + x_2 sin(t) + x_3 cos(t) +
           x_4 sin(2t) + x_5 cos(2t) + ...

    Where x coefficients correspond to the values of each dimension and t is
    linearly spaced between -pi and +pi. Each row of frame then corresponds to
    a single curve.

    Parameters
    ----------
    frame : DataFrame
        Data to be plotted, preferably normalized to (0.0, 1.0).
    class_column : Name of the column containing class names
    ax : matplotlib axes object, default None
    samples : Number of points to plot in each curve
    color : list or tuple, optional
        Colors to use for the different classes.
    colormap : str or matplotlib colormap object, default None
        Colormap to select colors from. If string, load colormap with that
        name from matplotlib.
    **kwargs
        Options to pass to matplotlib plotting method.

    Returns
    -------
    :class:`matplotlib.axes.Axes`

    Examples
    --------

    .. plot::
        :context: close-figs

        >>> df = pd.read_csv(
        ...     'https://raw.github.com/pandas-dev/'
        ...     'pandas/master/pandas/tests/io/data/csv/iris.csv'
        ... )
        >>> pd.plotting.andrews_curves(df, 'Name')
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.andrews_curves(
        frame=frame,
        class_column=class_column,
        ax=ax,
        samples=samples,
        color=color,
        colormap=colormap,
        **kwargs,
    )


def bootstrap_plot(series, fig=None, size=50, samples=500, **kwds):
    """
    Bootstrap plot on mean, median and mid-range statistics.

    The bootstrap plot is used to estimate the uncertainty of a statistic
    by relying on random sampling with replacement [1]_. This function will
    generate bootstrapping plots for mean, median and mid-range statistics
    for the given number of samples of the given size.

    .. [1] "Bootstrapping (statistics)" in \
    https://en.wikipedia.org/wiki/Bootstrapping_%28statistics%29

    Parameters
    ----------
    series : pandas.Series
        pandas Series from where to get the samplings for the bootstrapping.
    fig : matplotlib.figure.Figure, default None
        If given, it will use the `fig` reference for plotting instead of
        creating a new one with default parameters.
    size : int, default 50
        Number of data points to consider during each sampling. It must be
        less than or equal to the length of the `series`.
    samples : int, default 500
        Number of times the bootstrap procedure is performed.
    **kwds
        Options to pass to matplotlib plotting method.

    Returns
    -------
    matplotlib.figure.Figure
        Matplotlib figure.

    See Also
    --------
    DataFrame.plot : Basic plotting for DataFrame objects.
    Series.plot : Basic plotting for Series objects.

    Examples
    --------
    This example draws a basic bootstrap plot for a Series.

    .. plot::
        :context: close-figs

        >>> s = pd.Series(np.random.uniform(size=100))
        >>> pd.plotting.bootstrap_plot(s)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.bootstrap_plot(
        series=series, fig=fig, size=size, samples=samples, **kwds
    )


def parallel_coordinates(
    frame,
    class_column,
    cols=None,
    ax=None,
    color=None,
    use_columns=False,
    xticks=None,
    colormap=None,
    axvlines=True,
    axvlines_kwds=None,
    sort_labels=False,
    **kwargs,
):
    """
    Parallel coordinates plotting.

    Parameters
    ----------
    frame : DataFrame
    class_column : str
        Column name containing class names.
    cols : list, optional
        A list of column names to use.
    ax : matplotlib.axis, optional
        Matplotlib axis object.
    color : list or tuple, optional
        Colors to use for the different classes.
    use_columns : bool, optional
        If true, columns will be used as xticks.
    xticks : list or tuple, optional
        A list of values to use for xticks.
    colormap : str or matplotlib colormap, default None
        Colormap to use for line colors.
    axvlines : bool, optional
        If true, vertical lines will be added at each xtick.
    axvlines_kwds : keywords, optional
        Options to be passed to axvline method for vertical lines.
    sort_labels : bool, default False
        Sort class_column labels, useful when assigning colors.
    **kwargs
        Options to pass to matplotlib plotting method.

    Returns
    -------
    :class:`matplotlib.axes.Axes`

    Examples
    --------

    .. plot::
        :context: close-figs

        >>> df = pd.read_csv(
        ...     'https://raw.github.com/pandas-dev/'
        ...     'pandas/master/pandas/tests/io/data/csv/iris.csv'
        ... )
        >>> pd.plotting.parallel_coordinates(
        ...     df, 'Name', color=('#556270', '#4ECDC4', '#C7F464')
        ... )
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.parallel_coordinates(
        frame=frame,
        class_column=class_column,
        cols=cols,
        ax=ax,
        color=color,
        use_columns=use_columns,
        xticks=xticks,
        colormap=colormap,
        axvlines=axvlines,
        axvlines_kwds=axvlines_kwds,
        sort_labels=sort_labels,
        **kwargs,
    )


def lag_plot(series, lag=1, ax=None, **kwds):
    """
    Lag plot for time series.

    Parameters
    ----------
    series : Time series
    lag : lag of the scatter plot, default 1
    ax : Matplotlib axis object, optional
    **kwds
        Matplotlib scatter method keyword arguments.

    Returns
    -------
    :class:`matplotlib.axes.Axes`

    Examples
    --------
    Lag plots are most commonly used to look for patterns in time series data.

    Given the following time series

    .. plot::
        :context: close-figs

        >>> np.random.seed(5)
        >>> x = np.cumsum(np.random.normal(loc=1, scale=5, size=50))
        >>> s = pd.Series(x)
        >>> s.plot()

    A lag plot with ``lag=1`` returns

    .. plot::
        :context: close-figs

        >>> pd.plotting.lag_plot(s, lag=1)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.lag_plot(series=series, lag=lag, ax=ax, **kwds)


def autocorrelation_plot(series, ax=None, **kwargs):
    """
    Autocorrelation plot for time series.

    Parameters
    ----------
    series : Time series
    ax : Matplotlib axis object, optional
    **kwargs
        Options to pass to matplotlib plotting method.

    Returns
    -------
    :class:`matplotlib.axes.Axes`

    Examples
    --------
    The horizontal lines in the plot correspond to 95% and 99% confidence
    bands. The dashed line is the 99% confidence band.

    .. plot::
        :context: close-figs

        >>> spacing = np.linspace(-9 * np.pi, 9 * np.pi, num=1000)
        >>> s = pd.Series(0.7 * np.random.rand(1000) + 0.3 * np.sin(spacing))
        >>> pd.plotting.autocorrelation_plot(s)
    """
    plot_backend = _get_plot_backend("matplotlib")
    return plot_backend.autocorrelation_plot(series=series, ax=ax, **kwargs)


class _Options(dict):
    """
    Stores pandas plotting options.

    Allows for parameter aliasing so you can just use parameter names that are
    the same as the plot function parameters, but is stored in a canonical
    format that makes it easy to break down into groups later.
    """

    # alias so the names are same as plotting method parameter names
    _ALIASES = {"x_compat": "xaxis.compat"}
    _DEFAULT_KEYS = ["xaxis.compat"]

    def __init__(self, deprecated=False):
        self._deprecated = deprecated
        super().__setitem__("xaxis.compat", False)

    def __getitem__(self, key):
        key = self._get_canonical_key(key)
        if key not in self:
            raise ValueError(f"{key} is not a valid pandas plotting option")
        return super().__getitem__(key)

    def __setitem__(self, key, value):
        key = self._get_canonical_key(key)
        return super().__setitem__(key, value)

    def __delitem__(self, key):
        key = self._get_canonical_key(key)
        if key in self._DEFAULT_KEYS:
            raise ValueError(f"Cannot remove default parameter {key}")
        return super().__delitem__(key)

    def __contains__(self, key) -> bool:
        key = self._get_canonical_key(key)
        return super().__contains__(key)

    def reset(self):
        """
        Reset the option store to its initial state.

        Returns
        -------
        None
        """
        self.__init__()

    def _get_canonical_key(self, key):
        return self._ALIASES.get(key, key)

    @contextmanager
    def use(self, key, value):
        """
        Temporarily set a parameter value using the with statement.
        Aliasing allowed.
        """
        old_value = self[key]
        try:
            self[key] = value
            yield self
        finally:
            self[key] = old_value


plot_params = _Options()
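As a usage note, the `_Options.use` context manager defined above is exposed as `pd.plotting.plot_params`; a small sketch of temporarily overriding an aliased option:

import pandas as pd

# "x_compat" is an alias for the canonical "xaxis.compat" key, and the old
# value is restored on exit even if the body of the with-block raises.
print(pd.plotting.plot_params["x_compat"])  # False by default
with pd.plotting.plot_params.use("x_compat", True):
    print(pd.plotting.plot_params["xaxis.compat"])  # True inside the block
print(pd.plotting.plot_params["x_compat"])  # False again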
TomAugspurger/pandas
pandas/tests/util/test_assert_index_equal.py
pandas/plotting/_misc.py
""" Helper functions to generate range-like data for DatetimeArray (and possibly TimedeltaArray/PeriodArray) """ from typing import Union import numpy as np from pandas._libs.tslibs import OutOfBoundsDatetime, Timedelta, Timestamp from pandas.tseries.offsets import DateOffset def generate_regular_range( start: Union[Timestamp, Timedelta], end: Union[Timestamp, Timedelta], periods: int, freq: DateOffset, ): """ Generate a range of dates or timestamps with the spans between dates described by the given `freq` DateOffset. Parameters ---------- start : Timedelta, Timestamp or None First point of produced date range. end : Timedelta, Timestamp or None Last point of produced date range. periods : int Number of periods in produced date range. freq : Tick Describes space between dates in produced date range. Returns ------- ndarray[np.int64] Representing nanoseconds. """ start = start.value if start is not None else None end = end.value if end is not None else None stride = freq.nanos if periods is None: b = start # cannot just use e = Timestamp(end) + 1 because arange breaks when # stride is too large, see GH10887 e = b + (end - b) // stride * stride + stride // 2 + 1 elif start is not None: b = start e = _generate_range_overflow_safe(b, periods, stride, side="start") elif end is not None: e = end + stride b = _generate_range_overflow_safe(e, periods, stride, side="end") else: raise ValueError( "at least 'start' or 'end' should be specified if a 'period' is given." ) with np.errstate(over="raise"): # If the range is sufficiently large, np.arange may overflow # and incorrectly return an empty array if not caught. try: values = np.arange(b, e, stride, dtype=np.int64) except FloatingPointError: xdr = [b] while xdr[-1] != e: xdr.append(xdr[-1] + stride) values = np.array(xdr[:-1], dtype=np.int64) return values def _generate_range_overflow_safe( endpoint: int, periods: int, stride: int, side: str = "start" ) -> int: """ Calculate the second endpoint for passing to np.arange, checking to avoid an integer overflow. Catch OverflowError and re-raise as OutOfBoundsDatetime. Parameters ---------- endpoint : int nanosecond timestamp of the known endpoint of the desired range periods : int number of periods in the desired range stride : int nanoseconds between periods in the desired range side : {'start', 'end'} which end of the range `endpoint` refers to Returns ------- other_end : int Raises ------ OutOfBoundsDatetime """ # GH#14187 raise instead of incorrectly wrapping around assert side in ["start", "end"] i64max = np.uint64(np.iinfo(np.int64).max) msg = f"Cannot generate range with {side}={endpoint} and periods={periods}" with np.errstate(over="raise"): # if periods * strides cannot be multiplied within the *uint64* bounds, # we cannot salvage the operation by recursing, so raise try: addend = np.uint64(periods) * np.uint64(np.abs(stride)) except FloatingPointError as err: raise OutOfBoundsDatetime(msg) from err if np.abs(addend) <= i64max: # relatively easy case without casting concerns return _generate_range_overflow_safe_signed(endpoint, periods, stride, side) elif (endpoint > 0 and side == "start" and stride > 0) or ( endpoint < 0 and side == "end" and stride > 0 ): # no chance of not-overflowing raise OutOfBoundsDatetime(msg) elif side == "end" and endpoint > i64max and endpoint - stride <= i64max: # in _generate_regular_range we added `stride` thereby overflowing # the bounds. Adjust to fix this. 
return _generate_range_overflow_safe( endpoint - stride, periods - 1, stride, side ) # split into smaller pieces mid_periods = periods // 2 remaining = periods - mid_periods assert 0 < remaining < periods, (remaining, periods, endpoint, stride) midpoint = _generate_range_overflow_safe(endpoint, mid_periods, stride, side) return _generate_range_overflow_safe(midpoint, remaining, stride, side) def _generate_range_overflow_safe_signed( endpoint: int, periods: int, stride: int, side: str ) -> int: """ A special case for _generate_range_overflow_safe where `periods * stride` can be calculated without overflowing int64 bounds. """ assert side in ["start", "end"] if side == "end": stride *= -1 with np.errstate(over="raise"): addend = np.int64(periods) * np.int64(stride) try: # easy case with no overflows return np.int64(endpoint) + addend except (FloatingPointError, OverflowError): # with endpoint negative and addend positive we risk # FloatingPointError; with reversed signed we risk OverflowError pass # if stride and endpoint had opposite signs, then endpoint + addend # should never overflow. so they must have the same signs assert (stride > 0 and endpoint >= 0) or (stride < 0 and endpoint <= 0) if stride > 0: # watch out for very special case in which we just slightly # exceed implementation bounds, but when passing the result to # np.arange will get a result slightly within the bounds result = np.uint64(endpoint) + np.uint64(addend) i64max = np.uint64(np.iinfo(np.int64).max) assert result > i64max if result <= i64max + np.uint64(stride): return result raise OutOfBoundsDatetime( f"Cannot generate range with {side}={endpoint} and periods={periods}" )
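The overflow guards above rely on NumPy scalar arithmetic honoring `np.errstate`; a minimal sketch of the idea, with illustrative values:

import numpy as np

with np.errstate(over="raise"):
    try:
        # 2**32 * 2**33 == 2**65 does not fit in uint64, so instead of
        # silently wrapping around, the scalar multiplication raises.
        np.uint64(2 ** 32) * np.uint64(2 ** 33)
    except FloatingPointError:
        print("periods * stride would overflow; split the range in half")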
TomAugspurger/pandas
pandas/tests/util/test_assert_index_equal.py
pandas/core/arrays/_ranges.py
"""Provide configuration end points for Automations.""" from collections import OrderedDict import uuid from homeassistant.components.automation import DOMAIN, PLATFORM_SCHEMA from homeassistant.components.automation.config import async_validate_config_item from homeassistant.config import AUTOMATION_CONFIG_PATH from homeassistant.const import CONF_ID, SERVICE_RELOAD from homeassistant.helpers import config_validation as cv, entity_registry from . import ACTION_DELETE, EditIdBasedConfigView async def async_setup(hass): """Set up the Automation config API.""" async def hook(action, config_key): """post_write_hook for Config View that reloads automations.""" await hass.services.async_call(DOMAIN, SERVICE_RELOAD) if action != ACTION_DELETE: return ent_reg = await entity_registry.async_get_registry(hass) entity_id = ent_reg.async_get_entity_id(DOMAIN, DOMAIN, config_key) if entity_id is None: return ent_reg.async_remove(entity_id) hass.http.register_view( EditAutomationConfigView( DOMAIN, "config", AUTOMATION_CONFIG_PATH, cv.string, PLATFORM_SCHEMA, post_write_hook=hook, data_validator=async_validate_config_item, ) ) return True class EditAutomationConfigView(EditIdBasedConfigView): """Edit automation config.""" def _write_value(self, hass, data, config_key, new_value): """Set value.""" index = None for index, cur_value in enumerate(data): # When people copy paste their automations to the config file, # they sometimes forget to add IDs. Fix it here. if CONF_ID not in cur_value: cur_value[CONF_ID] = uuid.uuid4().hex elif cur_value[CONF_ID] == config_key: break else: cur_value = OrderedDict() cur_value[CONF_ID] = config_key index = len(data) data.append(cur_value) # Iterate through some keys that we want to have ordered in the output updated_value = OrderedDict() for key in ("id", "alias", "description", "trigger", "condition", "action"): if key in cur_value: updated_value[key] = cur_value[key] if key in new_value: updated_value[key] = new_value[key] # We cover all current fields above, but just in case we start # supporting more fields in the future. updated_value.update(cur_value) updated_value.update(new_value) data[index] = updated_value
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
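The failure cases above lean on `Mock.side_effect`; a tiny sketch of how assigning an exception instance turns every later call into a raise, without any network I/O:

from unittest.mock import Mock

from requests.exceptions import ConnectTimeout

api = Mock()
api.get_details.side_effect = ConnectTimeout()
try:
    api.get_details()
except ConnectTimeout:
    print("simulated timeout")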
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/config/automation.py
"""Support for Genius Hub switch/outlet devices.""" from homeassistant.components.switch import DEVICE_CLASS_OUTLET, SwitchEntity from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import DOMAIN, GeniusZone ATTR_DURATION = "duration" GH_ON_OFF_ZONE = "on / off" async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None ) -> None: """Set up the Genius Hub switch entities.""" if discovery_info is None: return broker = hass.data[DOMAIN]["broker"] async_add_entities( [ GeniusSwitch(broker, z) for z in broker.client.zone_objs if z.data["type"] == GH_ON_OFF_ZONE ] ) class GeniusSwitch(GeniusZone, SwitchEntity): """Representation of a Genius Hub switch.""" @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" return DEVICE_CLASS_OUTLET @property def is_on(self) -> bool: """Return the current state of the on/off zone. The zone is considered 'on' if & only if it is override/on (e.g. timer/on is 'off'). """ return self._zone.data["mode"] == "override" and self._zone.data["setpoint"] async def async_turn_off(self, **kwargs) -> None: """Send the zone to Timer mode. The zone is deemed 'off' in this mode, although the plugs may actually be on. """ await self._zone.set_mode("timer") async def async_turn_on(self, **kwargs) -> None: """Set the zone to override/on ({'setpoint': true}) for x seconds.""" await self._zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/geniushub/switch.py
"""STT constante.""" from enum import Enum DOMAIN = "stt" class AudioCodecs(str, Enum): """Supported Audio codecs.""" PCM = "pcm" OPUS = "opus" class AudioFormats(str, Enum): """Supported Audio formats.""" WAV = "wav" OGG = "ogg" class AudioBitRates(int, Enum): """Supported Audio bit rates.""" BITRATE_8 = 8 BITRATE_16 = 16 BITRATE_24 = 24 BITRATE_32 = 32 class AudioSampleRates(int, Enum): """Supported Audio sample rates.""" SAMPLERATE_8000 = 8000 SAMPLERATE_11000 = 11000 SAMPLERATE_16000 = 16000 SAMPLERATE_18900 = 18900 SAMPLERATE_22000 = 22000 SAMPLERATE_32000 = 32000 SAMPLERATE_37800 = 37800 SAMPLERATE_44100 = 44100 SAMPLERATE_48000 = 48000 class AudioChannels(int, Enum): """Supported Audio channel.""" CHANNEL_MONO = 1 CHANNEL_STEREO = 2 class SpeechResultState(str, Enum): """Result state of speech.""" SUCCESS = "success" ERROR = "error"
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/stt/const.py
"""Helpers for amcrest component.""" import logging from .const import DOMAIN def service_signal(service, *args): """Encode signal.""" return "_".join([DOMAIN, service, *args]) def log_update_error(logger, action, name, entity_type, error, level=logging.ERROR): """Log an update error.""" logger.log( level, "Could not %s %s %s due to error: %s", action, name, entity_type, error.__class__.__name__, )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
"""Component to interface with various media players.""" import asyncio import base64 import collections from datetime import timedelta import functools as ft import hashlib import logging from random import SystemRandom from typing import Optional from urllib.parse import urlparse from aiohttp import web from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE from aiohttp.typedefs import LooseHeaders import async_timeout import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView from homeassistant.const import ( HTTP_INTERNAL_SERVER_ERROR, HTTP_NOT_FOUND, HTTP_OK, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK, SERVICE_MEDIA_STOP, SERVICE_SHUFFLE_SET, SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, STATE_IDLE, STATE_OFF, STATE_PLAYING, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.network import get_url from homeassistant.loader import bind_hass from .const import ( ATTR_APP_ID, ATTR_APP_NAME, ATTR_INPUT_SOURCE, ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_ALBUM_ARTIST, ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ARTIST, ATTR_MEDIA_CHANNEL, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_DURATION, ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_EPISODE, ATTR_MEDIA_PLAYLIST, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, ATTR_MEDIA_SEASON, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SERIES_TITLE, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TITLE, ATTR_MEDIA_TRACK, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, ATTR_SOUND_MODE, ATTR_SOUND_MODE_LIST, DOMAIN, SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, SUPPORT_CLEAR_PLAYLIST, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK, SUPPORT_SELECT_SOUND_MODE, SUPPORT_SELECT_SOURCE, SUPPORT_SHUFFLE_SET, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP, ) # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) _RND = SystemRandom() ENTITY_ID_FORMAT = DOMAIN + ".{}" CACHE_IMAGES = "images" CACHE_MAXSIZE = "maxsize" CACHE_LOCK = "lock" CACHE_URL = "url" CACHE_CONTENT = "content" ENTITY_IMAGE_CACHE = {CACHE_IMAGES: collections.OrderedDict(), CACHE_MAXSIZE: 16} SCAN_INTERVAL = timedelta(seconds=10) DEVICE_CLASS_TV = "tv" DEVICE_CLASS_SPEAKER = "speaker" DEVICE_CLASSES = [DEVICE_CLASS_TV, DEVICE_CLASS_SPEAKER] DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) MEDIA_PLAYER_PLAY_MEDIA_SCHEMA = { vol.Required(ATTR_MEDIA_CONTENT_TYPE): cv.string, vol.Required(ATTR_MEDIA_CONTENT_ID): cv.string, vol.Optional(ATTR_MEDIA_ENQUEUE): cv.boolean, } ATTR_TO_PROPERTY = [ ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_DURATION, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, ATTR_MEDIA_TITLE, ATTR_MEDIA_ARTIST, ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ALBUM_ARTIST, ATTR_MEDIA_TRACK, ATTR_MEDIA_SERIES_TITLE, ATTR_MEDIA_SEASON, 
ATTR_MEDIA_EPISODE, ATTR_MEDIA_CHANNEL, ATTR_MEDIA_PLAYLIST, ATTR_APP_ID, ATTR_APP_NAME, ATTR_INPUT_SOURCE, ATTR_SOUND_MODE, ATTR_MEDIA_SHUFFLE, ] @bind_hass def is_on(hass, entity_id=None): """ Return true if specified media player entity_id is on. Check all media player if no entity_id specified. """ entity_ids = [entity_id] if entity_id else hass.states.entity_ids(DOMAIN) return any( not hass.states.is_state(entity_id, STATE_OFF) for entity_id in entity_ids ) WS_TYPE_MEDIA_PLAYER_THUMBNAIL = "media_player_thumbnail" SCHEMA_WEBSOCKET_GET_THUMBNAIL = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend( {"type": WS_TYPE_MEDIA_PLAYER_THUMBNAIL, "entity_id": cv.entity_id} ) def _rename_keys(**keys): """Create validator that renames keys. Necessary because the service schema names do not match the command parameters. Async friendly. """ def rename(value): for to_key, from_key in keys.items(): if from_key in value: value[to_key] = value.pop(from_key) return value return rename async def async_setup(hass, config): """Track states and offer events for media_players.""" component = hass.data[DOMAIN] = EntityComponent( logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL ) hass.components.websocket_api.async_register_command( WS_TYPE_MEDIA_PLAYER_THUMBNAIL, websocket_handle_thumbnail, SCHEMA_WEBSOCKET_GET_THUMBNAIL, ) hass.http.register_view(MediaPlayerImageView(component)) await component.async_setup(config) component.async_register_entity_service( SERVICE_TURN_ON, {}, "async_turn_on", [SUPPORT_TURN_ON] ) component.async_register_entity_service( SERVICE_TURN_OFF, {}, "async_turn_off", [SUPPORT_TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, {}, "async_toggle", [SUPPORT_TURN_OFF | SUPPORT_TURN_ON] ) component.async_register_entity_service( SERVICE_VOLUME_UP, {}, "async_volume_up", [SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP], ) component.async_register_entity_service( SERVICE_VOLUME_DOWN, {}, "async_volume_down", [SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP], ) component.async_register_entity_service( SERVICE_MEDIA_PLAY_PAUSE, {}, "async_media_play_pause", [SUPPORT_PLAY | SUPPORT_PAUSE], ) component.async_register_entity_service( SERVICE_MEDIA_PLAY, {}, "async_media_play", [SUPPORT_PLAY] ) component.async_register_entity_service( SERVICE_MEDIA_PAUSE, {}, "async_media_pause", [SUPPORT_PAUSE] ) component.async_register_entity_service( SERVICE_MEDIA_STOP, {}, "async_media_stop", [SUPPORT_STOP] ) component.async_register_entity_service( SERVICE_MEDIA_NEXT_TRACK, {}, "async_media_next_track", [SUPPORT_NEXT_TRACK] ) component.async_register_entity_service( SERVICE_MEDIA_PREVIOUS_TRACK, {}, "async_media_previous_track", [SUPPORT_PREVIOUS_TRACK], ) component.async_register_entity_service( SERVICE_CLEAR_PLAYLIST, {}, "async_clear_playlist", [SUPPORT_CLEAR_PLAYLIST] ) component.async_register_entity_service( SERVICE_VOLUME_SET, vol.All( cv.make_entity_service_schema( {vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float} ), _rename_keys(volume=ATTR_MEDIA_VOLUME_LEVEL), ), "async_set_volume_level", [SUPPORT_VOLUME_SET], ) component.async_register_entity_service( SERVICE_VOLUME_MUTE, vol.All( cv.make_entity_service_schema( {vol.Required(ATTR_MEDIA_VOLUME_MUTED): cv.boolean} ), _rename_keys(mute=ATTR_MEDIA_VOLUME_MUTED), ), "async_mute_volume", [SUPPORT_VOLUME_MUTE], ) component.async_register_entity_service( SERVICE_MEDIA_SEEK, vol.All( cv.make_entity_service_schema( { vol.Required(ATTR_MEDIA_SEEK_POSITION): vol.All( vol.Coerce(float), vol.Range(min=0) ) } ), 
_rename_keys(position=ATTR_MEDIA_SEEK_POSITION), ), "async_media_seek", [SUPPORT_SEEK], ) component.async_register_entity_service( SERVICE_SELECT_SOURCE, {vol.Required(ATTR_INPUT_SOURCE): cv.string}, "async_select_source", [SUPPORT_SELECT_SOURCE], ) component.async_register_entity_service( SERVICE_SELECT_SOUND_MODE, {vol.Required(ATTR_SOUND_MODE): cv.string}, "async_select_sound_mode", [SUPPORT_SELECT_SOUND_MODE], ) component.async_register_entity_service( SERVICE_PLAY_MEDIA, vol.All( cv.make_entity_service_schema(MEDIA_PLAYER_PLAY_MEDIA_SCHEMA), _rename_keys( media_type=ATTR_MEDIA_CONTENT_TYPE, media_id=ATTR_MEDIA_CONTENT_ID, enqueue=ATTR_MEDIA_ENQUEUE, ), ), "async_play_media", [SUPPORT_PLAY_MEDIA], ) component.async_register_entity_service( SERVICE_SHUFFLE_SET, {vol.Required(ATTR_MEDIA_SHUFFLE): cv.boolean}, "async_set_shuffle", [SUPPORT_SHUFFLE_SET], ) return True async def async_setup_entry(hass, entry): """Set up a config entry.""" return await hass.data[DOMAIN].async_setup_entry(entry) async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.data[DOMAIN].async_unload_entry(entry) class MediaPlayerEntity(Entity): """ABC for media player entities.""" _access_token: Optional[str] = None # Implement these for your media player @property def state(self): """State of the player.""" return None @property def access_token(self) -> str: """Access token for this media player.""" if self._access_token is None: self._access_token = hashlib.sha256( _RND.getrandbits(256).to_bytes(32, "little") ).hexdigest() return self._access_token @property def volume_level(self): """Volume level of the media player (0..1).""" return None @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return None @property def media_content_id(self): """Content ID of current playing media.""" return None @property def media_content_type(self): """Content type of current playing media.""" return None @property def media_duration(self): """Duration of current playing media in seconds.""" return None @property def media_position(self): """Position of current playing media in seconds.""" return None @property def media_position_updated_at(self): """When was the position of the current playing media valid. Returns value from homeassistant.util.dt.utcnow(). 
""" return None @property def media_image_url(self): """Image url of current playing media.""" return None @property def media_image_remotely_accessible(self) -> bool: """If the image url is remotely accessible.""" return False @property def media_image_hash(self): """Hash value for media image.""" url = self.media_image_url if url is not None: return hashlib.sha256(url.encode("utf-8")).hexdigest()[:16] return None async def async_get_media_image(self): """Fetch media image of current playing image.""" url = self.media_image_url if url is None: return None, None return await _async_fetch_image(self.hass, url) @property def media_title(self): """Title of current playing media.""" return None @property def media_artist(self): """Artist of current playing media, music track only.""" return None @property def media_album_name(self): """Album name of current playing media, music track only.""" return None @property def media_album_artist(self): """Album artist of current playing media, music track only.""" return None @property def media_track(self): """Track number of current playing media, music track only.""" return None @property def media_series_title(self): """Title of series of current playing media, TV show only.""" return None @property def media_season(self): """Season of current playing media, TV show only.""" return None @property def media_episode(self): """Episode of current playing media, TV show only.""" return None @property def media_channel(self): """Channel currently playing.""" return None @property def media_playlist(self): """Title of Playlist currently playing.""" return None @property def app_id(self): """ID of the current running app.""" return None @property def app_name(self): """Name of the current running app.""" return None @property def source(self): """Name of the current input source.""" return None @property def source_list(self): """List of available input sources.""" return None @property def sound_mode(self): """Name of the current sound mode.""" return None @property def sound_mode_list(self): """List of available sound modes.""" return None @property def shuffle(self): """Boolean if shuffle is enabled.""" return None @property def supported_features(self): """Flag media player features that are supported.""" return 0 def turn_on(self): """Turn the media player on.""" raise NotImplementedError() async def async_turn_on(self): """Turn the media player on.""" await self.hass.async_add_job(self.turn_on) def turn_off(self): """Turn the media player off.""" raise NotImplementedError() async def async_turn_off(self): """Turn the media player off.""" await self.hass.async_add_job(self.turn_off) def mute_volume(self, mute): """Mute the volume.""" raise NotImplementedError() async def async_mute_volume(self, mute): """Mute the volume.""" await self.hass.async_add_job(self.mute_volume, mute) def set_volume_level(self, volume): """Set volume level, range 0..1.""" raise NotImplementedError() async def async_set_volume_level(self, volume): """Set volume level, range 0..1.""" await self.hass.async_add_job(self.set_volume_level, volume) def media_play(self): """Send play command.""" raise NotImplementedError() async def async_media_play(self): """Send play command.""" await self.hass.async_add_job(self.media_play) def media_pause(self): """Send pause command.""" raise NotImplementedError() async def async_media_pause(self): """Send pause command.""" await self.hass.async_add_job(self.media_pause) def media_stop(self): """Send stop command.""" raise NotImplementedError() 
async def async_media_stop(self): """Send stop command.""" await self.hass.async_add_job(self.media_stop) def media_previous_track(self): """Send previous track command.""" raise NotImplementedError() async def async_media_previous_track(self): """Send previous track command.""" await self.hass.async_add_job(self.media_previous_track) def media_next_track(self): """Send next track command.""" raise NotImplementedError() async def async_media_next_track(self): """Send next track command.""" await self.hass.async_add_job(self.media_next_track) def media_seek(self, position): """Send seek command.""" raise NotImplementedError() async def async_media_seek(self, position): """Send seek command.""" await self.hass.async_add_job(self.media_seek, position) def play_media(self, media_type, media_id, **kwargs): """Play a piece of media.""" raise NotImplementedError() async def async_play_media(self, media_type, media_id, **kwargs): """Play a piece of media.""" await self.hass.async_add_job( ft.partial(self.play_media, media_type, media_id, **kwargs) ) def select_source(self, source): """Select input source.""" raise NotImplementedError() async def async_select_source(self, source): """Select input source.""" await self.hass.async_add_job(self.select_source, source) def select_sound_mode(self, sound_mode): """Select sound mode.""" raise NotImplementedError() async def async_select_sound_mode(self, sound_mode): """Select sound mode.""" await self.hass.async_add_job(self.select_sound_mode, sound_mode) def clear_playlist(self): """Clear players playlist.""" raise NotImplementedError() async def async_clear_playlist(self): """Clear players playlist.""" await self.hass.async_add_job(self.clear_playlist) def set_shuffle(self, shuffle): """Enable/disable shuffle mode.""" raise NotImplementedError() async def async_set_shuffle(self, shuffle): """Enable/disable shuffle mode.""" await self.hass.async_add_job(self.set_shuffle, shuffle) # No need to overwrite these. 
@property def support_play(self): """Boolean if play is supported.""" return bool(self.supported_features & SUPPORT_PLAY) @property def support_pause(self): """Boolean if pause is supported.""" return bool(self.supported_features & SUPPORT_PAUSE) @property def support_stop(self): """Boolean if stop is supported.""" return bool(self.supported_features & SUPPORT_STOP) @property def support_seek(self): """Boolean if seek is supported.""" return bool(self.supported_features & SUPPORT_SEEK) @property def support_volume_set(self): """Boolean if setting volume is supported.""" return bool(self.supported_features & SUPPORT_VOLUME_SET) @property def support_volume_mute(self): """Boolean if muting volume is supported.""" return bool(self.supported_features & SUPPORT_VOLUME_MUTE) @property def support_previous_track(self): """Boolean if previous track command supported.""" return bool(self.supported_features & SUPPORT_PREVIOUS_TRACK) @property def support_next_track(self): """Boolean if next track command supported.""" return bool(self.supported_features & SUPPORT_NEXT_TRACK) @property def support_play_media(self): """Boolean if play media command supported.""" return bool(self.supported_features & SUPPORT_PLAY_MEDIA) @property def support_select_source(self): """Boolean if select source command supported.""" return bool(self.supported_features & SUPPORT_SELECT_SOURCE) @property def support_select_sound_mode(self): """Boolean if select sound mode command supported.""" return bool(self.supported_features & SUPPORT_SELECT_SOUND_MODE) @property def support_clear_playlist(self): """Boolean if clear playlist command supported.""" return bool(self.supported_features & SUPPORT_CLEAR_PLAYLIST) @property def support_shuffle_set(self): """Boolean if shuffle is supported.""" return bool(self.supported_features & SUPPORT_SHUFFLE_SET) async def async_toggle(self): """Toggle the power on the media player.""" if hasattr(self, "toggle"): # pylint: disable=no-member await self.hass.async_add_job(self.toggle) return if self.state in [STATE_OFF, STATE_IDLE]: await self.async_turn_on() else: await self.async_turn_off() async def async_volume_up(self): """Turn volume up for media player. This method is a coroutine. """ if hasattr(self, "volume_up"): # pylint: disable=no-member await self.hass.async_add_job(self.volume_up) return if self.volume_level < 1 and self.supported_features & SUPPORT_VOLUME_SET: await self.async_set_volume_level(min(1, self.volume_level + 0.1)) async def async_volume_down(self): """Turn volume down for media player. This method is a coroutine. 
""" if hasattr(self, "volume_down"): # pylint: disable=no-member await self.hass.async_add_job(self.volume_down) return if self.volume_level > 0 and self.supported_features & SUPPORT_VOLUME_SET: await self.async_set_volume_level(max(0, self.volume_level - 0.1)) async def async_media_play_pause(self): """Play or pause the media player.""" if hasattr(self, "media_play_pause"): # pylint: disable=no-member await self.hass.async_add_job(self.media_play_pause) return if self.state == STATE_PLAYING: await self.async_media_pause() else: await self.async_media_play() @property def entity_picture(self): """Return image of the media playing.""" if self.state == STATE_OFF: return None if self.media_image_remotely_accessible: return self.media_image_url return self.media_image_local @property def media_image_local(self): """Return local url to media image.""" image_hash = self.media_image_hash if image_hash is None: return None return ( f"/api/media_player_proxy/{self.entity_id}?" f"token={self.access_token}&cache={image_hash}" ) @property def capability_attributes(self): """Return capability attributes.""" supported_features = self.supported_features or 0 data = {} if supported_features & SUPPORT_SELECT_SOURCE: source_list = self.source_list if source_list: data[ATTR_INPUT_SOURCE_LIST] = source_list if supported_features & SUPPORT_SELECT_SOUND_MODE: sound_mode_list = self.sound_mode_list if sound_mode_list: data[ATTR_SOUND_MODE_LIST] = sound_mode_list return data @property def state_attributes(self): """Return the state attributes.""" if self.state == STATE_OFF: return None state_attr = {} for attr in ATTR_TO_PROPERTY: value = getattr(self, attr) if value is not None: state_attr[attr] = value if self.media_image_remotely_accessible: state_attr["entity_picture_local"] = self.media_image_local return state_attr async def _async_fetch_image(hass, url): """Fetch image. Images are cached in memory (the images are typically 10-100kB in size). 
""" cache_images = ENTITY_IMAGE_CACHE[CACHE_IMAGES] cache_maxsize = ENTITY_IMAGE_CACHE[CACHE_MAXSIZE] if urlparse(url).hostname is None: url = f"{get_url(hass)}{url}" if url not in cache_images: cache_images[url] = {CACHE_LOCK: asyncio.Lock()} async with cache_images[url][CACHE_LOCK]: if CACHE_CONTENT in cache_images[url]: return cache_images[url][CACHE_CONTENT] content, content_type = (None, None) websession = async_get_clientsession(hass) try: with async_timeout.timeout(10): response = await websession.get(url) if response.status == HTTP_OK: content = await response.read() content_type = response.headers.get(CONTENT_TYPE) if content_type: content_type = content_type.split(";")[0] cache_images[url][CACHE_CONTENT] = content, content_type except asyncio.TimeoutError: pass while len(cache_images) > cache_maxsize: cache_images.popitem(last=False) return content, content_type class MediaPlayerImageView(HomeAssistantView): """Media player view to serve an image.""" requires_auth = False url = "/api/media_player_proxy/{entity_id}" name = "api:media_player:image" def __init__(self, component): """Initialize a media player view.""" self.component = component async def get(self, request: web.Request, entity_id: str) -> web.Response: """Start a get request.""" player = self.component.get_entity(entity_id) if player is None: status = HTTP_NOT_FOUND if request[KEY_AUTHENTICATED] else 401 return web.Response(status=status) authenticated = ( request[KEY_AUTHENTICATED] or request.query.get("token") == player.access_token ) if not authenticated: return web.Response(status=401) data, content_type = await player.async_get_media_image() if data is None: return web.Response(status=HTTP_INTERNAL_SERVER_ERROR) headers: LooseHeaders = {CACHE_CONTROL: "max-age=3600"} return web.Response(body=data, content_type=content_type, headers=headers) @websocket_api.async_response async def websocket_handle_thumbnail(hass, connection, msg): """Handle get media player cover command. Async friendly. """ component = hass.data[DOMAIN] player = component.get_entity(msg["entity_id"]) if player is None: connection.send_message( websocket_api.error_message( msg["id"], "entity_not_found", "Entity not found" ) ) return _LOGGER.warning( "The websocket command media_player_thumbnail is deprecated. Use /api/media_player_proxy instead." ) data, content_type = await player.async_get_media_image() if data is None: connection.send_message( websocket_api.error_message( msg["id"], "thumbnail_fetch_failed", "Failed to fetch thumbnail" ) ) return await connection.send_big_result( msg["id"], { "content_type": content_type, "content": base64.b64encode(data).decode("utf-8"), }, ) class MediaPlayerDevice(MediaPlayerEntity): """ABC for media player devices (for backwards compatibility).""" def __init_subclass__(cls, **kwargs): """Print deprecation warning.""" super().__init_subclass__(**kwargs) _LOGGER.warning( "MediaPlayerDevice is deprecated, modify %s to extend MediaPlayerEntity", cls.__name__, )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/media_player/__init__.py
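To make the entity contract above concrete, here is a minimal, hypothetical platform entity built on MediaPlayerEntity. The class, its name, and its state handling are invented for illustration; only the imported names come from the component itself.

# Hypothetical example entity, not part of the component. It shows the usual
# pattern: override state/name, advertise features via supported_features,
# and implement only the sync hooks for the features you flag.
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
    SUPPORT_PAUSE,
    SUPPORT_PLAY,
    SUPPORT_TURN_OFF,
    SUPPORT_TURN_ON,
)
from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_PLAYING


class DemoPlayer(MediaPlayerEntity):
    """Invented example player for illustration."""

    def __init__(self, name):
        self._name = name
        self._state = STATE_OFF

    @property
    def name(self):
        return self._name

    @property
    def state(self):
        return self._state

    @property
    def supported_features(self):
        # Services registered in async_setup are gated on these flags, so a
        # media_player.media_play call is rejected unless SUPPORT_PLAY is set.
        return SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY | SUPPORT_PAUSE

    def turn_on(self):
        self._state = STATE_IDLE

    def turn_off(self):
        self._state = STATE_OFF

    def media_play(self):
        self._state = STATE_PLAYING

    def media_pause(self):
        self._state = STATE_IDLE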
"""Add support for the Xiaomi TVs.""" import logging import pymitv import voluptuous as vol from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_STEP, ) from homeassistant.const import CONF_HOST, CONF_NAME, STATE_OFF, STATE_ON import homeassistant.helpers.config_validation as cv DEFAULT_NAME = "Xiaomi TV" _LOGGER = logging.getLogger(__name__) SUPPORT_XIAOMI_TV = SUPPORT_VOLUME_STEP | SUPPORT_TURN_ON | SUPPORT_TURN_OFF # No host is needed for configuration, however it can be set. PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Xiaomi TV platform.""" # If a hostname is set. Discovery is skipped. host = config.get(CONF_HOST) name = config.get(CONF_NAME) if host is not None: # Check if there's a valid TV at the IP address. if not pymitv.Discover().check_ip(host): _LOGGER.error("Could not find Xiaomi TV with specified IP: %s", host) else: # Register TV with Home Assistant. add_entities([XiaomiTV(host, name)]) else: # Otherwise, discover TVs on network. add_entities(XiaomiTV(tv, DEFAULT_NAME) for tv in pymitv.Discover().scan()) class XiaomiTV(MediaPlayerEntity): """Represent the Xiaomi TV for Home Assistant.""" def __init__(self, ip, name): """Receive IP address and name to construct class.""" # Initialize the Xiaomi TV. self._tv = pymitv.TV(ip) # Default name value, only to be overridden by user. self._name = name self._state = STATE_OFF @property def name(self): """Return the display name of this TV.""" return self._name @property def state(self): """Return _state variable, containing the appropriate constant.""" return self._state @property def assumed_state(self): """Indicate that state is assumed.""" return True @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_XIAOMI_TV def turn_off(self): """ Instruct the TV to turn sleep. This is done instead of turning off, because the TV won't accept any input when turned off. Thus, the user would be unable to turn the TV back on, unless it's done manually. """ if self._state != STATE_OFF: self._tv.sleep() self._state = STATE_OFF def turn_on(self): """Wake the TV back up from sleep.""" if self._state != STATE_ON: self._tv.wake() self._state = STATE_ON def volume_up(self): """Increase volume by one.""" self._tv.volume_up() def volume_down(self): """Decrease volume by one.""" self._tv.volume_down()
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/xiaomi_tv/media_player.py
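The SUPPORT_XIAOMI_TV bitmask above is what gates which media_player services reach the entity. A standalone illustration of how such flags combine and are tested follows; the numeric values are stand-ins for the real SUPPORT_* constants in media_player.const.

# Standalone illustration of bitwise feature flags (values are stand-ins).
SUPPORT_SEEK = 2
SUPPORT_TURN_ON = 128
SUPPORT_TURN_OFF = 256
SUPPORT_VOLUME_STEP = 1024

# Combining flags is a bitwise OR, as in the module above:
SUPPORT_XIAOMI_TV = SUPPORT_VOLUME_STEP | SUPPORT_TURN_ON | SUPPORT_TURN_OFF

# A feature check is a single bitwise AND:
assert SUPPORT_XIAOMI_TV & SUPPORT_TURN_ON
assert not SUPPORT_XIAOMI_TV & SUPPORT_SEEK  # seeking is not advertised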
"""Time-based One Time Password auth module.""" import asyncio from io import BytesIO import logging from typing import Any, Dict, Optional, Tuple import voluptuous as vol from homeassistant.auth.models import User from homeassistant.core import HomeAssistant from . import ( MULTI_FACTOR_AUTH_MODULE_SCHEMA, MULTI_FACTOR_AUTH_MODULES, MultiFactorAuthModule, SetupFlow, ) REQUIREMENTS = ["pyotp==2.3.0", "PyQRCode==1.2.1"] CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA) STORAGE_VERSION = 1 STORAGE_KEY = "auth_module.totp" STORAGE_USERS = "users" STORAGE_USER_ID = "user_id" STORAGE_OTA_SECRET = "ota_secret" INPUT_FIELD_CODE = "code" DUMMY_SECRET = "FPPTH34D4E3MI2HG" _LOGGER = logging.getLogger(__name__) def _generate_qr_code(data: str) -> str: """Generate a base64 PNG string represent QR Code image of data.""" import pyqrcode # pylint: disable=import-outside-toplevel qr_code = pyqrcode.create(data) with BytesIO() as buffer: qr_code.svg(file=buffer, scale=4) return str( buffer.getvalue() .decode("ascii") .replace("\n", "") .replace( '<?xml version="1.0" encoding="UTF-8"?>' '<svg xmlns="http://www.w3.org/2000/svg"', "<svg", ) ) def _generate_secret_and_qr_code(username: str) -> Tuple[str, str, str]: """Generate a secret, url, and QR code.""" import pyotp # pylint: disable=import-outside-toplevel ota_secret = pyotp.random_base32() url = pyotp.totp.TOTP(ota_secret).provisioning_uri( username, issuer_name="Home Assistant" ) image = _generate_qr_code(url) return ota_secret, url, image @MULTI_FACTOR_AUTH_MODULES.register("totp") class TotpAuthModule(MultiFactorAuthModule): """Auth module validate time-based one time password.""" DEFAULT_TITLE = "Time-based One Time Password" MAX_RETRY_TIME = 5 def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None: """Initialize the user data store.""" super().__init__(hass, config) self._users: Optional[Dict[str, str]] = None self._user_store = hass.helpers.storage.Store( STORAGE_VERSION, STORAGE_KEY, private=True ) self._init_lock = asyncio.Lock() @property def input_schema(self) -> vol.Schema: """Validate login flow input data.""" return vol.Schema({INPUT_FIELD_CODE: str}) async def _async_load(self) -> None: """Load stored data.""" async with self._init_lock: if self._users is not None: return data = await self._user_store.async_load() if data is None: data = {STORAGE_USERS: {}} self._users = data.get(STORAGE_USERS, {}) async def _async_save(self) -> None: """Save data.""" await self._user_store.async_save({STORAGE_USERS: self._users}) def _add_ota_secret(self, user_id: str, secret: Optional[str] = None) -> str: """Create a ota_secret for user.""" import pyotp # pylint: disable=import-outside-toplevel ota_secret: str = secret or pyotp.random_base32() self._users[user_id] = ota_secret # type: ignore return ota_secret async def async_setup_flow(self, user_id: str) -> SetupFlow: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow """ user = await self.hass.auth.async_get_user(user_id) assert user is not None return TotpSetupFlow(self, self.input_schema, user) async def async_setup_user(self, user_id: str, setup_data: Any) -> str: """Set up auth module for user.""" if self._users is None: await self._async_load() result = await self.hass.async_add_executor_job( self._add_ota_secret, user_id, setup_data.get("secret") ) await self._async_save() return result async def async_depose_user(self, user_id: str) -> None: """Depose auth module for user.""" if self._users is None: await self._async_load() if self._users.pop(user_id, None): # type: ignore await self._async_save() async def async_is_user_setup(self, user_id: str) -> bool: """Return whether user is setup.""" if self._users is None: await self._async_load() return user_id in self._users # type: ignore async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool: """Return True if validation passed.""" if self._users is None: await self._async_load() # user_input has been validate in caller # set INPUT_FIELD_CODE as vol.Required is not user friendly return await self.hass.async_add_executor_job( self._validate_2fa, user_id, user_input.get(INPUT_FIELD_CODE, "") ) def _validate_2fa(self, user_id: str, code: str) -> bool: """Validate two factor authentication code.""" import pyotp # pylint: disable=import-outside-toplevel ota_secret = self._users.get(user_id) # type: ignore if ota_secret is None: # even we cannot find user, we still do verify # to make timing the same as if user was found. pyotp.TOTP(DUMMY_SECRET).verify(code, valid_window=1) return False return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1)) class TotpSetupFlow(SetupFlow): """Handler for the setup flow.""" def __init__( self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user.id) # to fix typing complaint self._auth_module: TotpAuthModule = auth_module self._user = user self._ota_secret: Optional[str] = None self._url = None # type Optional[str] self._image = None # type Optional[str] async def async_step_init( self, user_input: Optional[Dict[str, str]] = None ) -> Dict[str, Any]: """Handle the first step of setup flow. Return self.async_show_form(step_id='init') if user_input is None. Return self.async_create_entry(data={'result': result}) if finish. """ import pyotp # pylint: disable=import-outside-toplevel errors: Dict[str, str] = {} if user_input: verified = await self.hass.async_add_executor_job( # type: ignore pyotp.TOTP(self._ota_secret).verify, user_input["code"] ) if verified: result = await self._auth_module.async_setup_user( self._user_id, {"secret": self._ota_secret} ) return self.async_create_entry( title=self._auth_module.name, data={"result": result} ) errors["base"] = "invalid_code" else: hass = self._auth_module.hass ( self._ota_secret, self._url, self._image, ) = await hass.async_add_executor_job( _generate_secret_and_qr_code, # type: ignore str(self._user.name), ) return self.async_show_form( step_id="init", data_schema=self._setup_schema, description_placeholders={ "code": self._ota_secret, "url": self._url, "qr_code": self._image, }, errors=errors, )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/auth/mfa_modules/totp.py
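The module above leans on a small pyotp surface. A standalone sketch of those calls follows (the username is a placeholder); note how valid_window=1, as used in _validate_2fa, tolerates one 30-second step of clock drift between the server and the authenticator app.

# Standalone demonstration of the pyotp primitives used by the module above.
import pyotp

secret = pyotp.random_base32()  # the per-user ota_secret
totp = pyotp.TOTP(secret)

code = totp.now()  # what an authenticator app would display right now
assert totp.verify(code)
# valid_window=1 additionally accepts the codes of the previous and next
# 30-second windows, which is the clock-drift tolerance _validate_2fa uses.
assert totp.verify(code, valid_window=1)

# Provisioning URI that gets encoded into the QR code shown during setup:
print(totp.provisioning_uri("alice", issuer_name="Home Assistant"))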
"""Support for MAX! binary sensors via MAX! Cube.""" import logging from homeassistant.components.binary_sensor import BinarySensorEntity from . import DATA_KEY _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_entities, discovery_info=None): """Iterate through all MAX! Devices and add window shutters.""" devices = [] for handler in hass.data[DATA_KEY].values(): cube = handler.cube for device in cube.devices: name = f"{cube.room_by_id(device.room_id).name} {device.name}" # Only add Window Shutters if cube.is_windowshutter(device): devices.append(MaxCubeShutter(handler, name, device.rf_address)) if devices: add_entities(devices) class MaxCubeShutter(BinarySensorEntity): """Representation of a MAX! Cube Binary Sensor device.""" def __init__(self, handler, name, rf_address): """Initialize MAX! Cube BinarySensorEntity.""" self._name = name self._sensor_type = "window" self._rf_address = rf_address self._cubehandle = handler self._state = None @property def should_poll(self): """Return the polling state.""" return True @property def name(self): """Return the name of the BinarySensorEntity.""" return self._name @property def device_class(self): """Return the class of this sensor.""" return self._sensor_type @property def is_on(self): """Return true if the binary sensor is on/open.""" return self._state def update(self): """Get latest data from MAX! Cube.""" self._cubehandle.update() device = self._cubehandle.cube.device_by_rf(self._rf_address) self._state = device.is_open
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/maxcube/binary_sensor.py
"""Support for Xiaomi Miio.""" import logging from homeassistant import config_entries, core from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.helpers import device_registry as dr from .config_flow import CONF_FLOW_TYPE, CONF_GATEWAY from .const import DOMAIN from .gateway import ConnectXiaomiGateway _LOGGER = logging.getLogger(__name__) GATEWAY_PLATFORMS = ["alarm_control_panel"] async def async_setup(hass: core.HomeAssistant, config: dict): """Set up the Xiaomi Miio component.""" return True async def async_setup_entry( hass: core.HomeAssistant, entry: config_entries.ConfigEntry ): """Set up the Xiaomi Miio components from a config entry.""" hass.data[DOMAIN] = {} if entry.data[CONF_FLOW_TYPE] == CONF_GATEWAY: if not await async_setup_gateway_entry(hass, entry): return False return True async def async_setup_gateway_entry( hass: core.HomeAssistant, entry: config_entries.ConfigEntry ): """Set up the Xiaomi Gateway component from a config entry.""" host = entry.data[CONF_HOST] token = entry.data[CONF_TOKEN] name = entry.title gateway_id = entry.unique_id # For backwards compat if entry.unique_id.endswith("-gateway"): hass.config_entries.async_update_entry(entry, unique_id=entry.data["mac"]) # Connect to gateway gateway = ConnectXiaomiGateway(hass) if not await gateway.async_connect_gateway(host, token): return False gateway_info = gateway.gateway_info hass.data[DOMAIN][entry.entry_id] = gateway.gateway_device gateway_model = f"{gateway_info.model}-{gateway_info.hardware_version}" device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, gateway_info.mac_address)}, identifiers={(DOMAIN, gateway_id)}, manufacturer="Xiaomi", name=name, model=gateway_model, sw_version=gateway_info.firmware_version, ) for component in GATEWAY_PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/xiaomi_miio/__init__.py
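As an aside to the fixture in the test file above: the suite swaps the real solaredge.Solaredge client for a Mock whose canned return value is re-armed per scenario. A minimal, self-contained sketch of that mechanic (using only unittest.mock and requests, not the Home Assistant test harness):

from unittest.mock import Mock

from requests.exceptions import ConnectTimeout

api = Mock()
api.get_details.return_value = {"details": {"status": "active"}}
assert api.get_details() == {"details": {"status": "active"}}

# Re-arming the mock with a side_effect makes the next call raise instead,
# which is how the tests drive the could_not_connect error branch.
api.get_details.side_effect = ConnectTimeout()
try:
    api.get_details()
except ConnectTimeout:
    pass  # raised as configured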
"""Support for switching devices via Pilight to on and off.""" import logging import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, LightEntity, ) from homeassistant.const import CONF_LIGHTS import homeassistant.helpers.config_validation as cv from .base_class import SWITCHES_SCHEMA, PilightBaseDevice from .const import CONF_DIMLEVEL_MAX, CONF_DIMLEVEL_MIN _LOGGER = logging.getLogger(__name__) LIGHTS_SCHEMA = SWITCHES_SCHEMA.extend( { vol.Optional(CONF_DIMLEVEL_MIN, default=0): cv.positive_int, vol.Optional(CONF_DIMLEVEL_MAX, default=15): cv.positive_int, } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_LIGHTS): vol.Schema({cv.string: LIGHTS_SCHEMA})} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Pilight platform.""" switches = config.get(CONF_LIGHTS) devices = [] for dev_name, dev_config in switches.items(): devices.append(PilightLight(hass, dev_name, dev_config)) add_entities(devices) class PilightLight(PilightBaseDevice, LightEntity): """Representation of a Pilight switch.""" def __init__(self, hass, name, config): """Initialize a switch.""" super().__init__(hass, name, config) self._dimlevel_min = config.get(CONF_DIMLEVEL_MIN) self._dimlevel_max = config.get(CONF_DIMLEVEL_MAX) @property def brightness(self): """Return the brightness.""" return self._brightness @property def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS def turn_on(self, **kwargs): """Turn the switch on by calling pilight.send service with on code.""" self._brightness = kwargs.get(ATTR_BRIGHTNESS, 255) dimlevel = int(self._brightness / (255 / self._dimlevel_max)) self.set_state(turn_on=True, dimlevel=dimlevel)
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/pilight/light.py
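A standalone sketch of the brightness-to-dimlevel mapping used in PilightLight.turn_on above, assuming the default dimlevel_max of 15; the int() conversion truncates toward zero, so mid-range brightnesses round down:

def to_dimlevel(brightness, dimlevel_max=15):
    """Map Home Assistant brightness (0-255) onto a pilight dim level."""
    return int(brightness / (255 / dimlevel_max))

assert to_dimlevel(255) == 15
assert to_dimlevel(128) == 7  # 128 / 17 == 7.52..., truncated to 7
assert to_dimlevel(0) == 0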
"""Support for Fibaro lights.""" import asyncio from functools import partial import logging from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, LightEntity, ) from homeassistant.const import CONF_WHITE_VALUE import homeassistant.util.color as color_util from . import CONF_COLOR, CONF_DIMMING, CONF_RESET_COLOR, FIBARO_DEVICES, FibaroDevice _LOGGER = logging.getLogger(__name__) def scaleto255(value): """Scale the input value from 0-100 to 0-255.""" # Fibaro has a funny way of storing brightness either 0-100 or 0-99 # depending on device type (e.g. dimmer vs led) if value > 98: value = 100 return max(0, min(255, ((value * 255.0) / 100.0))) def scaleto100(value): """Scale the input value from 0-255 to 0-100.""" # Make sure a low but non-zero value is not rounded down to zero if 0 < value < 3: return 1 return max(0, min(100, ((value * 100.0) / 255.0))) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Perform the setup for Fibaro controller devices.""" if discovery_info is None: return async_add_entities( [FibaroLight(device) for device in hass.data[FIBARO_DEVICES]["light"]], True ) class FibaroLight(FibaroDevice, LightEntity): """Representation of a Fibaro Light, including dimmable.""" def __init__(self, fibaro_device): """Initialize the light.""" self._brightness = None self._color = (0, 0) self._last_brightness = 0 self._supported_flags = 0 self._update_lock = asyncio.Lock() self._white = 0 devconf = fibaro_device.device_config self._reset_color = devconf.get(CONF_RESET_COLOR, False) supports_color = ( "color" in fibaro_device.properties and "setColor" in fibaro_device.actions ) supports_dimming = "levelChange" in fibaro_device.interfaces supports_white_v = "setW" in fibaro_device.actions # Configuration can override default capability detection if devconf.get(CONF_DIMMING, supports_dimming): self._supported_flags |= SUPPORT_BRIGHTNESS if devconf.get(CONF_COLOR, supports_color): self._supported_flags |= SUPPORT_COLOR if devconf.get(CONF_WHITE_VALUE, supports_white_v): self._supported_flags |= SUPPORT_WHITE_VALUE super().__init__(fibaro_device) self.entity_id = f"{DOMAIN}.{self.ha_id}" @property def brightness(self): """Return the brightness of the light.""" return scaleto255(self._brightness) @property def hs_color(self): """Return the color of the light.""" return self._color @property def white_value(self): """Return the white value of this light between 0..255.""" return self._white @property def supported_features(self): """Flag supported features.""" return self._supported_flags async def async_turn_on(self, **kwargs): """Turn the light on.""" async with self._update_lock: await self.hass.async_add_executor_job(partial(self._turn_on, **kwargs)) def _turn_on(self, **kwargs): """Really turn the light on.""" if self._supported_flags & SUPPORT_BRIGHTNESS: target_brightness = kwargs.get(ATTR_BRIGHTNESS) # No brightness specified, so we either restore it to # last brightness or switch it on at maximum level if target_brightness is None: if self._brightness == 0: if self._last_brightness: self._brightness = self._last_brightness else: self._brightness = 100 else: # We set it to the target brightness and turn it on self._brightness = scaleto100(target_brightness) if self._supported_flags & SUPPORT_COLOR: if ( self._reset_color and kwargs.get(ATTR_WHITE_VALUE) is None and kwargs.get(ATTR_HS_COLOR) is None and kwargs.get(ATTR_BRIGHTNESS) is None ): 
self._color = (100, 0) # Update based on parameters self._white = kwargs.get(ATTR_WHITE_VALUE, self._white) self._color = kwargs.get(ATTR_HS_COLOR, self._color) rgb = color_util.color_hs_to_RGB(*self._color) self.call_set_color( round(rgb[0] * self._brightness / 100.0), round(rgb[1] * self._brightness / 100.0), round(rgb[2] * self._brightness / 100.0), round(self._white * self._brightness / 100.0), ) if self.state == "off": self.set_level(int(self._brightness)) return if self._reset_color: bri255 = scaleto255(self._brightness) self.call_set_color(bri255, bri255, bri255, bri255) if self._supported_flags & SUPPORT_BRIGHTNESS: self.set_level(int(self._brightness)) return # The simplest case is left for last. No dimming, just switch on self.call_turn_on() async def async_turn_off(self, **kwargs): """Turn the light off.""" async with self._update_lock: await self.hass.async_add_executor_job(partial(self._turn_off, **kwargs)) def _turn_off(self, **kwargs): """Really turn the light off.""" # Let's save the last brightness level before we switch it off if ( (self._supported_flags & SUPPORT_BRIGHTNESS) and self._brightness and self._brightness > 0 ): self._last_brightness = self._brightness self._brightness = 0 self.call_turn_off() @property def is_on(self): """Return true if device is on.""" return self.current_binary_state async def async_update(self): """Update the state.""" async with self._update_lock: await self.hass.async_add_executor_job(self._update) def _update(self): """Really update the state.""" # Brightness handling if self._supported_flags & SUPPORT_BRIGHTNESS: self._brightness = float(self.fibaro_device.properties.value) # Fibaro might report 0-99 or 0-100 for brightness, # based on device type, so we round up here if self._brightness > 99: self._brightness = 100 # Color handling if ( self._supported_flags & SUPPORT_COLOR and "color" in self.fibaro_device.properties and "," in self.fibaro_device.properties.color ): # Fibaro communicates the color as an 'R, G, B, W' string rgbw_s = self.fibaro_device.properties.color if rgbw_s == "0,0,0,0" and "lastColorSet" in self.fibaro_device.properties: rgbw_s = self.fibaro_device.properties.lastColorSet rgbw_list = [int(i) for i in rgbw_s.split(",")][:4] if rgbw_list[0] or rgbw_list[1] or rgbw_list[2]: self._color = color_util.color_RGB_to_hs(*rgbw_list[:3]) if (self._supported_flags & SUPPORT_WHITE_VALUE) and self.brightness != 0: self._white = min(255, max(0, rgbw_list[3] * 100.0 / self._brightness))
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/fibaro/light.py
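The two scaling helpers in the Fibaro file above carry non-obvious special cases (99 is promoted to full brightness, and low non-zero values never round down to off). They are restated here verbatim so the edge cases can be exercised in isolation:

def scaleto255(value):
    if value > 98:
        value = 100
    return max(0, min(255, (value * 255.0) / 100.0))

def scaleto100(value):
    if 0 < value < 3:
        return 1
    return max(0, min(100, (value * 100.0) / 255.0))

assert scaleto255(99) == 255.0  # 99 counts as full brightness
assert scaleto100(2) == 1       # a dim-but-on light stays on after round-trip
assert scaleto100(scaleto255(50)) == 50.0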
"""Alexa state report code.""" import asyncio import json import logging import aiohttp import async_timeout from homeassistant.const import MATCH_ALL, STATE_ON import homeassistant.util.dt as dt_util from .const import API_CHANGE, Cause from .entities import ENTITY_ADAPTERS from .messages import AlexaResponse _LOGGER = logging.getLogger(__name__) DEFAULT_TIMEOUT = 10 async def async_enable_proactive_mode(hass, smart_home_config): """Enable the proactive mode. Proactive mode makes this component report state changes to Alexa. """ # Validate we can get access token. await smart_home_config.async_get_access_token() async def async_entity_state_listener(changed_entity, old_state, new_state): if not hass.is_running: return if not new_state: return if new_state.domain not in ENTITY_ADAPTERS: return if not smart_home_config.should_expose(changed_entity): _LOGGER.debug("Not exposing %s because filtered by config", changed_entity) return alexa_changed_entity = ENTITY_ADAPTERS[new_state.domain]( hass, smart_home_config, new_state ) for interface in alexa_changed_entity.interfaces(): if interface.properties_proactively_reported(): await async_send_changereport_message( hass, smart_home_config, alexa_changed_entity ) return if ( interface.name() == "Alexa.DoorbellEventSource" and new_state.state == STATE_ON ): await async_send_doorbell_event_message( hass, smart_home_config, alexa_changed_entity ) return return hass.helpers.event.async_track_state_change( MATCH_ALL, async_entity_state_listener ) async def async_send_changereport_message( hass, config, alexa_entity, *, invalidate_access_token=True ): """Send a ChangeReport message for an Alexa entity. https://developer.amazon.com/docs/smarthome/state-reporting-for-a-smart-home-skill.html#report-state-with-changereport-events """ token = await config.async_get_access_token() headers = {"Authorization": f"Bearer {token}"} endpoint = alexa_entity.alexa_id() # this sends all the properties of the Alexa Entity, whether they have # changed or not. this should be improved, and properties that have not # changed should be moved to the 'context' object properties = list(alexa_entity.serialize_properties()) payload = { API_CHANGE: {"cause": {"type": Cause.APP_INTERACTION}, "properties": properties} } message = AlexaResponse(name="ChangeReport", namespace="Alexa", payload=payload) message.set_endpoint_full(token, endpoint) message_serialized = message.serialize() session = hass.helpers.aiohttp_client.async_get_clientsession() try: with async_timeout.timeout(DEFAULT_TIMEOUT): response = await session.post( config.endpoint, headers=headers, json=message_serialized, allow_redirects=True, ) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.error("Timeout sending report to Alexa.") return response_text = await response.text() _LOGGER.debug("Sent: %s", json.dumps(message_serialized)) _LOGGER.debug("Received (%s): %s", response.status, response_text) if response.status == 202: return response_json = json.loads(response_text) if ( response_json["payload"]["code"] == "INVALID_ACCESS_TOKEN_EXCEPTION" and not invalidate_access_token ): config.async_invalidate_access_token() return await async_send_changereport_message( hass, config, alexa_entity, invalidate_access_token=False ) _LOGGER.error( "Error when sending ChangeReport to Alexa: %s: %s", response_json["payload"]["code"], response_json["payload"]["description"], ) async def async_send_add_or_update_message(hass, config, entity_ids): """Send an AddOrUpdateReport message for entities. 
    https://developer.amazon.com/docs/device-apis/alexa-discovery.html#add-or-update-report
    """
    token = await config.async_get_access_token()
    headers = {"Authorization": f"Bearer {token}"}

    endpoints = []

    for entity_id in entity_ids:
        domain = entity_id.split(".", 1)[0]

        if domain not in ENTITY_ADAPTERS:
            continue

        alexa_entity = ENTITY_ADAPTERS[domain](hass, config, hass.states.get(entity_id))
        endpoints.append(alexa_entity.serialize_discovery())

    payload = {"endpoints": endpoints, "scope": {"type": "BearerToken", "token": token}}

    message = AlexaResponse(
        name="AddOrUpdateReport", namespace="Alexa.Discovery", payload=payload
    )

    message_serialized = message.serialize()
    session = hass.helpers.aiohttp_client.async_get_clientsession()

    return await session.post(
        config.endpoint, headers=headers, json=message_serialized, allow_redirects=True
    )


async def async_send_delete_message(hass, config, entity_ids):
    """Send a DeleteReport message for entities.

    https://developer.amazon.com/docs/device-apis/alexa-discovery.html#deletereport-event
    """
    token = await config.async_get_access_token()
    headers = {"Authorization": f"Bearer {token}"}

    endpoints = []

    for entity_id in entity_ids:
        domain = entity_id.split(".", 1)[0]

        if domain not in ENTITY_ADAPTERS:
            continue

        alexa_entity = ENTITY_ADAPTERS[domain](hass, config, hass.states.get(entity_id))
        endpoints.append({"endpointId": alexa_entity.alexa_id()})

    payload = {"endpoints": endpoints, "scope": {"type": "BearerToken", "token": token}}

    message = AlexaResponse(
        name="DeleteReport", namespace="Alexa.Discovery", payload=payload
    )

    message_serialized = message.serialize()
    session = hass.helpers.aiohttp_client.async_get_clientsession()

    return await session.post(
        config.endpoint, headers=headers, json=message_serialized, allow_redirects=True
    )


async def async_send_doorbell_event_message(hass, config, alexa_entity):
    """Send a DoorbellPress event message for an Alexa entity.

    https://developer.amazon.com/docs/smarthome/send-events-to-the-alexa-event-gateway.html
    """
    token = await config.async_get_access_token()
    headers = {"Authorization": f"Bearer {token}"}

    endpoint = alexa_entity.alexa_id()

    message = AlexaResponse(
        name="DoorbellPress",
        namespace="Alexa.DoorbellEventSource",
        payload={
            "cause": {"type": Cause.PHYSICAL_INTERACTION},
            "timestamp": f"{dt_util.utcnow().replace(tzinfo=None).isoformat()}Z",
        },
    )

    message.set_endpoint_full(token, endpoint)

    message_serialized = message.serialize()
    session = hass.helpers.aiohttp_client.async_get_clientsession()

    try:
        with async_timeout.timeout(DEFAULT_TIMEOUT):
            response = await session.post(
                config.endpoint,
                headers=headers,
                json=message_serialized,
                allow_redirects=True,
            )
    except (asyncio.TimeoutError, aiohttp.ClientError):
        _LOGGER.error("Timeout sending report to Alexa.")
        return

    response_text = await response.text()

    _LOGGER.debug("Sent: %s", json.dumps(message_serialized))
    _LOGGER.debug("Received (%s): %s", response.status, response_text)

    if response.status == 202:
        return

    response_json = json.loads(response_text)

    _LOGGER.error(
        "Error when sending DoorbellPress event to Alexa: %s: %s",
        response_json["payload"]["code"],
        response_json["payload"]["description"],
    )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/alexa/state_report.py
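For reference, a sketch of the ChangeReport payload assembled in async_send_changereport_message above. The property entry is a made-up example, and the literal strings standing in for API_CHANGE and Cause.APP_INTERACTION are assumptions based on the Alexa event gateway documentation, not values confirmed by the file above:

properties = [
    {
        "namespace": "Alexa.PowerController",  # hypothetical property
        "name": "powerState",
        "value": "ON",
    }
]
payload = {
    "change": {  # assumed value of API_CHANGE
        "cause": {"type": "APP_INTERACTION"},  # assumed value of Cause.APP_INTERACTION
        "properties": properties,
    }
}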
"""Support for Nest Cameras.""" from datetime import timedelta import logging import requests from homeassistant.components import nest from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_ON_OFF, Camera from homeassistant.util.dt import utcnow _LOGGER = logging.getLogger(__name__) NEST_BRAND = "Nest" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({}) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a Nest Cam. No longer in use. """ async def async_setup_entry(hass, entry, async_add_entities): """Set up a Nest sensor based on a config entry.""" camera_devices = await hass.async_add_job(hass.data[nest.DATA_NEST].cameras) cameras = [NestCamera(structure, device) for structure, device in camera_devices] async_add_entities(cameras, True) class NestCamera(Camera): """Representation of a Nest Camera.""" def __init__(self, structure, device): """Initialize a Nest Camera.""" super().__init__() self.structure = structure self.device = device self._location = None self._name = None self._online = None self._is_streaming = None self._is_video_history_enabled = False # Default to non-NestAware subscribed, but will be fixed during update self._time_between_snapshots = timedelta(seconds=30) self._last_image = None self._next_snapshot_at = None @property def name(self): """Return the name of the nest, if any.""" return self._name @property def unique_id(self): """Return the serial number.""" return self.device.device_id @property def device_info(self): """Return information about the device.""" return { "identifiers": {(nest.DOMAIN, self.device.device_id)}, "name": self.device.name_long, "manufacturer": "Nest Labs", "model": "Camera", } @property def should_poll(self): """Nest camera should poll periodically.""" return True @property def is_recording(self): """Return true if the device is recording.""" return self._is_streaming @property def brand(self): """Return the brand of the camera.""" return NEST_BRAND @property def supported_features(self): """Nest Cam support turn on and off.""" return SUPPORT_ON_OFF @property def is_on(self): """Return true if on.""" return self._online and self._is_streaming def turn_off(self): """Turn off camera.""" _LOGGER.debug("Turn off camera %s", self._name) # Calling Nest API in is_streaming setter. # device.is_streaming would not immediately change until the process # finished in Nest Cam. self.device.is_streaming = False def turn_on(self): """Turn on camera.""" if not self._online: _LOGGER.error("Camera %s is offline.", self._name) return _LOGGER.debug("Turn on camera %s", self._name) # Calling Nest API in is_streaming setter. # device.is_streaming would not immediately change until the process # finished in Nest Cam. 
self.device.is_streaming = True def update(self): """Cache value from Python-nest.""" self._location = self.device.where self._name = self.device.name self._online = self.device.online self._is_streaming = self.device.is_streaming self._is_video_history_enabled = self.device.is_video_history_enabled if self._is_video_history_enabled: # NestAware allowed 10/min self._time_between_snapshots = timedelta(seconds=6) else: # Otherwise, 2/min self._time_between_snapshots = timedelta(seconds=30) def _ready_for_snapshot(self, now): return self._next_snapshot_at is None or now > self._next_snapshot_at def camera_image(self): """Return a still image response from the camera.""" now = utcnow() if self._ready_for_snapshot(now): url = self.device.snapshot_url try: response = requests.get(url) except requests.exceptions.RequestException as error: _LOGGER.error("Error getting camera image: %s", error) return None self._next_snapshot_at = now + self._time_between_snapshots self._last_image = response.content return self._last_image
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/nest/camera.py
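A standalone sketch of the snapshot throttling in NestCamera.camera_image above: a fresh still is only fetched once the interval chosen in update() (6 s with NestAware, 30 s otherwise) has elapsed; in between, the cached image is returned.

from datetime import datetime, timedelta

next_snapshot_at = None
interval = timedelta(seconds=30)  # non-NestAware default

def ready_for_snapshot(now):
    return next_snapshot_at is None or now > next_snapshot_at

now = datetime.utcnow()
assert ready_for_snapshot(now)      # first call always fetches
next_snapshot_at = now + interval
assert not ready_for_snapshot(now)  # calls inside the window reuse the cache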
"""Support for RFXtrx binary sensors.""" import logging import RFXtrx as rfxtrxmod import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import ( CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_DEVICE_CLASS, CONF_NAME, ) from homeassistant.helpers import config_validation as cv, event as evt from homeassistant.util import dt as dt_util, slugify from . import ( ATTR_NAME, CONF_AUTOMATIC_ADD, CONF_DATA_BITS, CONF_DEVICES, CONF_FIRE_EVENT, CONF_OFF_DELAY, RECEIVED_EVT_SUBSCRIBERS, RFX_DEVICES, apply_received_command, find_possible_pt2262_device, get_pt2262_cmd, get_pt2262_device, get_pt2262_deviceid, get_rfx_object, ) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_DEVICES, default={}): { cv.string: vol.Schema( { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, vol.Optional(CONF_OFF_DELAY): vol.Any( cv.time_period, cv.positive_timedelta ), vol.Optional(CONF_DATA_BITS): cv.positive_int, vol.Optional(CONF_COMMAND_ON): cv.byte, vol.Optional(CONF_COMMAND_OFF): cv.byte, } ) }, vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean, }, extra=vol.ALLOW_EXTRA, ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Binary Sensor platform to RFXtrx.""" sensors = [] for packet_id, entity in config[CONF_DEVICES].items(): event = get_rfx_object(packet_id) device_id = slugify(event.device.id_string.lower()) if device_id in RFX_DEVICES: continue if entity.get(CONF_DATA_BITS) is not None: _LOGGER.debug( "Masked device id: %s", get_pt2262_deviceid(device_id, entity.get(CONF_DATA_BITS)), ) _LOGGER.debug( "Add %s rfxtrx.binary_sensor (class %s)", entity[ATTR_NAME], entity.get(CONF_DEVICE_CLASS), ) device = RfxtrxBinarySensor( event, entity.get(CONF_NAME), entity.get(CONF_DEVICE_CLASS), entity[CONF_FIRE_EVENT], entity.get(CONF_OFF_DELAY), entity.get(CONF_DATA_BITS), entity.get(CONF_COMMAND_ON), entity.get(CONF_COMMAND_OFF), ) device.hass = hass sensors.append(device) RFX_DEVICES[device_id] = device add_entities(sensors) def binary_sensor_update(event): """Call for control updates from the RFXtrx gateway.""" if not isinstance(event, rfxtrxmod.ControlEvent): return device_id = slugify(event.device.id_string.lower()) sensor = RFX_DEVICES.get(device_id, get_pt2262_device(device_id)) if sensor is None: # Add the entity if not exists and automatic_add is True if not config[CONF_AUTOMATIC_ADD]: return if event.device.packettype == 0x13: poss_dev = find_possible_pt2262_device(device_id) if poss_dev is not None: poss_id = slugify(poss_dev.event.device.id_string.lower()) _LOGGER.debug("Found possible matching device ID: %s", poss_id) pkt_id = "".join(f"{x:02x}" for x in event.data) sensor = RfxtrxBinarySensor(event, pkt_id) sensor.hass = hass RFX_DEVICES[device_id] = sensor add_entities([sensor]) _LOGGER.info( "Added binary sensor %s (Device ID: %s Class: %s Sub: %s)", pkt_id, slugify(event.device.id_string.lower()), event.device.__class__.__name__, event.device.subtype, ) elif not isinstance(sensor, RfxtrxBinarySensor): return else: _LOGGER.debug( "Binary sensor update (Device ID: %s Class: %s Sub: %s)", slugify(event.device.id_string.lower()), event.device.__class__.__name__, event.device.subtype, ) if sensor.is_lighting4: if sensor.data_bits is not None: cmd = get_pt2262_cmd(device_id, sensor.data_bits) sensor.apply_cmd(int(cmd, 16)) 
            else:
                sensor.update_state(True)
        else:
            apply_received_command(event)

        if (
            sensor.is_on
            and sensor.off_delay is not None
            and sensor.delay_listener is None
        ):

            def off_delay_listener(now):
                """Switch device off after a delay."""
                sensor.delay_listener = None
                sensor.update_state(False)

            sensor.delay_listener = evt.track_point_in_time(
                hass, off_delay_listener, dt_util.utcnow() + sensor.off_delay
            )

    # Subscribe to main RFXtrx events
    if binary_sensor_update not in RECEIVED_EVT_SUBSCRIBERS:
        RECEIVED_EVT_SUBSCRIBERS.append(binary_sensor_update)


class RfxtrxBinarySensor(BinarySensorEntity):
    """A representation of an RFXtrx binary sensor."""

    def __init__(
        self,
        event,
        name,
        device_class=None,
        should_fire=False,
        off_delay=None,
        data_bits=None,
        cmd_on=None,
        cmd_off=None,
    ):
        """Initialize the RFXtrx sensor."""
        self.event = event
        self._name = name
        self._should_fire_event = should_fire
        self._device_class = device_class
        self._off_delay = off_delay
        self._state = False
        self.is_lighting4 = event.device.packettype == 0x13
        self.delay_listener = None
        self._data_bits = data_bits
        self._cmd_on = cmd_on
        self._cmd_off = cmd_off
        self._unique_id = f"{slugify(self.event.device.type_string.lower())}_{slugify(self.event.device.id_string.lower())}"

        if data_bits is not None:
            self._masked_id = get_pt2262_deviceid(
                event.device.id_string.lower(), data_bits
            )
        else:
            self._masked_id = None

    @property
    def name(self):
        """Return the device name."""
        return self._name

    @property
    def masked_id(self):
        """Return the masked device id (isolated address bits)."""
        return self._masked_id

    @property
    def data_bits(self):
        """Return the number of data bits."""
        return self._data_bits

    @property
    def cmd_on(self):
        """Return the value of the 'On' command."""
        return self._cmd_on

    @property
    def cmd_off(self):
        """Return the value of the 'Off' command."""
        return self._cmd_off

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def should_fire_event(self):
        """Return whether the device must fire an event."""
        return self._should_fire_event

    @property
    def device_class(self):
        """Return the sensor class."""
        return self._device_class

    @property
    def off_delay(self):
        """Return the off_delay attribute value."""
        return self._off_delay

    @property
    def is_on(self):
        """Return true if the sensor state is True."""
        return self._state

    @property
    def unique_id(self):
        """Return unique identifier of remote device."""
        return self._unique_id

    def apply_cmd(self, cmd):
        """Apply a command for updating the state."""
        if cmd == self.cmd_on:
            self.update_state(True)
        elif cmd == self.cmd_off:
            self.update_state(False)

    def update_state(self, state):
        """Update the state of the device."""
        self._state = state
        self.schedule_update_ha_state()
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/rfxtrx/binary_sensor.py
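A small sketch of how binary_sensor_update above derives the packet-id string from the raw event bytes when auto-adding an unknown device; the payload here is made up:

event_data = bytes([0x09, 0x13, 0x00, 0x2A])  # hypothetical raw packet
pkt_id = "".join(f"{x:02x}" for x in event_data)
assert pkt_id == "0913002a"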
"""Define a config flow manager for AirVisual.""" import asyncio from pyairvisual import Client from pyairvisual.errors import InvalidKeyError, NodeProError import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( CONF_API_KEY, CONF_IP_ADDRESS, CONF_LATITUDE, CONF_LONGITUDE, CONF_PASSWORD, CONF_SHOW_ON_MAP, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from . import async_get_geography_id from .const import ( # pylint: disable=unused-import CONF_GEOGRAPHIES, CONF_INTEGRATION_TYPE, DOMAIN, INTEGRATION_TYPE_GEOGRAPHY, INTEGRATION_TYPE_NODE_PRO, LOGGER, ) class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle an AirVisual config flow.""" VERSION = 2 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL @property def geography_schema(self): """Return the data schema for the cloud API.""" return vol.Schema( { vol.Required(CONF_API_KEY): str, vol.Required( CONF_LATITUDE, default=self.hass.config.latitude ): cv.latitude, vol.Required( CONF_LONGITUDE, default=self.hass.config.longitude ): cv.longitude, } ) @property def pick_integration_type_schema(self): """Return the data schema for picking the integration type.""" return vol.Schema( { vol.Required("type"): vol.In( [INTEGRATION_TYPE_GEOGRAPHY, INTEGRATION_TYPE_NODE_PRO] ) } ) @property def node_pro_schema(self): """Return the data schema for a Node/Pro.""" return vol.Schema( {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str} ) async def _async_set_unique_id(self, unique_id): """Set the unique ID of the config flow and abort if it already exists.""" await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() @staticmethod @callback def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return AirVisualOptionsFlowHandler(config_entry) async def async_step_geography(self, user_input=None): """Handle the initialization of the integration via the cloud API.""" if not user_input: return self.async_show_form( step_id="geography", data_schema=self.geography_schema ) geo_id = async_get_geography_id(user_input) await self._async_set_unique_id(geo_id) self._abort_if_unique_id_configured() # Find older config entries without unique ID: for entry in self._async_current_entries(): if entry.version != 1: continue if any( geo_id == async_get_geography_id(geography) for geography in entry.data[CONF_GEOGRAPHIES] ): return self.async_abort(reason="already_configured") websession = aiohttp_client.async_get_clientsession(self.hass) client = Client(session=websession, api_key=user_input[CONF_API_KEY]) # If this is the first (and only the first) time we've seen this API key, check # that it's valid: checked_keys = self.hass.data.setdefault("airvisual_checked_api_keys", set()) check_keys_lock = self.hass.data.setdefault( "airvisual_checked_api_keys_lock", asyncio.Lock() ) async with check_keys_lock: if user_input[CONF_API_KEY] not in checked_keys: try: await client.api.nearest_city() except InvalidKeyError: return self.async_show_form( step_id="geography", data_schema=self.geography_schema, errors={CONF_API_KEY: "invalid_api_key"}, ) checked_keys.add(user_input[CONF_API_KEY]) return self.async_create_entry( title=f"Cloud API ({geo_id})", data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY}, ) async def async_step_import(self, import_config): """Import a config entry from configuration.yaml.""" return await 
self.async_step_geography(import_config) async def async_step_node_pro(self, user_input=None): """Handle the initialization of the integration with a Node/Pro.""" if not user_input: return self.async_show_form( step_id="node_pro", data_schema=self.node_pro_schema ) await self._async_set_unique_id(user_input[CONF_IP_ADDRESS]) websession = aiohttp_client.async_get_clientsession(self.hass) client = Client(session=websession) try: await client.node.from_samba( user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD], include_history=False, include_trends=False, ) except NodeProError as err: LOGGER.error("Error connecting to Node/Pro unit: %s", err) return self.async_show_form( step_id="node_pro", data_schema=self.node_pro_schema, errors={CONF_IP_ADDRESS: "unable_to_connect"}, ) return self.async_create_entry( title=f"Node/Pro ({user_input[CONF_IP_ADDRESS]})", data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO}, ) async def async_step_user(self, user_input=None): """Handle the start of the config flow.""" if not user_input: return self.async_show_form( step_id="user", data_schema=self.pick_integration_type_schema ) if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY: return await self.async_step_geography() return await self.async_step_node_pro() class AirVisualOptionsFlowHandler(config_entries.OptionsFlow): """Handle an AirVisual options flow.""" def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required( CONF_SHOW_ON_MAP, default=self.config_entry.options.get(CONF_SHOW_ON_MAP), ): bool } ), )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/airvisual/config_flow.py
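The geography step above checks each API key at most once per Home Assistant instance, guarding a shared set with an asyncio.Lock so concurrent flows do not validate the same key twice. A self-contained sketch of that pattern, with a stand-in validate coroutine in place of the real nearest_city call:

import asyncio

async def main():
    checked_keys = set()
    check_lock = asyncio.Lock()
    calls = []

    async def validate(key):
        calls.append(key)

    async def ensure_key_checked(key):
        async with check_lock:
            if key not in checked_keys:
                await validate(key)  # only reached for unseen keys
                checked_keys.add(key)

    await asyncio.gather(*(ensure_key_checked("abc") for _ in range(3)))
    assert calls == ["abc"]  # validated exactly once despite three callers

asyncio.run(main())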
"""Support for binary sensor using I2C MCP23017 chip.""" import logging from adafruit_mcp230xx.mcp23017 import MCP23017 # pylint: disable=import-error import board # pylint: disable=import-error import busio # pylint: disable=import-error import digitalio # pylint: disable=import-error import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity from homeassistant.const import DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_INVERT_LOGIC = "invert_logic" CONF_I2C_ADDRESS = "i2c_address" CONF_PINS = "pins" CONF_PULL_MODE = "pull_mode" MODE_UP = "UP" MODE_DOWN = "DOWN" DEFAULT_INVERT_LOGIC = False DEFAULT_I2C_ADDRESS = 0x20 DEFAULT_PULL_MODE = MODE_UP _SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_PINS): _SENSORS_SCHEMA, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): vol.All( vol.Upper, vol.In([MODE_UP, MODE_DOWN]) ), vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int), } ) def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the MCP23017 binary sensors.""" pull_mode = config[CONF_PULL_MODE] invert_logic = config[CONF_INVERT_LOGIC] i2c_address = config[CONF_I2C_ADDRESS] i2c = busio.I2C(board.SCL, board.SDA) mcp = MCP23017(i2c, address=i2c_address) binary_sensors = [] pins = config[CONF_PINS] for pin_num, pin_name in pins.items(): pin = mcp.get_pin(pin_num) binary_sensors.append( MCP23017BinarySensor(pin_name, pin, pull_mode, invert_logic) ) add_devices(binary_sensors, True) class MCP23017BinarySensor(BinarySensorEntity): """Represent a binary sensor that uses MCP23017.""" def __init__(self, name, pin, pull_mode, invert_logic): """Initialize the MCP23017 binary sensor.""" self._name = name or DEVICE_DEFAULT_NAME self._pin = pin self._pull_mode = pull_mode self._invert_logic = invert_logic self._state = None self._pin.direction = digitalio.Direction.INPUT self._pin.pull = digitalio.Pull.UP @property def name(self): """Return the name of the sensor.""" return self._name @property def is_on(self): """Return the state of the entity.""" return self._state != self._invert_logic def update(self): """Update the GPIO state.""" self._state = self._pin.value
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/mcp23017/binary_sensor.py
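_SENSORS_SCHEMA above maps a positive pin number to a sensor name. A standalone sketch of the same validation, with plain voluptuous in place of the cv helpers (cv.positive_int also coerces strings, which this sketch skips):

import voluptuous as vol

# Pin number (non-negative int) mapped to a display name.
SENSORS_SCHEMA = vol.Schema({vol.All(int, vol.Range(min=0)): str})

print(SENSORS_SCHEMA({0: "Door", 4: "Window"}))  # {0: 'Door', 4: 'Window'}
# SENSORS_SCHEMA({-1: "Bad"}) raises vol.Invalid (the key fails validation)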
"""Support for OpenTherm Gateway devices.""" import asyncio from datetime import date, datetime import logging import pyotgw import pyotgw.vars as gw_vars import voluptuous as vol from homeassistant.components.binary_sensor import DOMAIN as COMP_BINARY_SENSOR from homeassistant.components.climate import DOMAIN as COMP_CLIMATE from homeassistant.components.sensor import DOMAIN as COMP_SENSOR from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( ATTR_DATE, ATTR_ID, ATTR_MODE, ATTR_TEMPERATURE, ATTR_TIME, CONF_DEVICE, CONF_ID, CONF_NAME, EVENT_HOMEASSISTANT_STOP, PRECISION_HALVES, PRECISION_TENTHS, PRECISION_WHOLE, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( ATTR_DHW_OVRD, ATTR_GW_ID, ATTR_LEVEL, CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, DATA_GATEWAYS, DATA_OPENTHERM_GW, DOMAIN, SERVICE_RESET_GATEWAY, SERVICE_SET_CLOCK, SERVICE_SET_CONTROL_SETPOINT, SERVICE_SET_GPIO_MODE, SERVICE_SET_HOT_WATER_OVRD, SERVICE_SET_HOT_WATER_SETPOINT, SERVICE_SET_LED_MODE, SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, SERVICE_SET_SB_TEMP, ) _LOGGER = logging.getLogger(__name__) CLIMATE_SCHEMA = vol.Schema( { vol.Optional(CONF_PRECISION): vol.In( [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] ), vol.Optional(CONF_FLOOR_TEMP, default=False): cv.boolean, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: cv.schema_with_slug_keys( { vol.Required(CONF_DEVICE): cv.string, vol.Optional(CONF_CLIMATE, default={}): CLIMATE_SCHEMA, vol.Optional(CONF_NAME): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def options_updated(hass, entry): """Handle options update.""" gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] async_dispatcher_send(hass, gateway.options_update_signal, entry) async def async_setup_entry(hass, config_entry): """Set up the OpenTherm Gateway component.""" if DATA_OPENTHERM_GW not in hass.data: hass.data[DATA_OPENTHERM_GW] = {DATA_GATEWAYS: {}} gateway = OpenThermGatewayDevice(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway config_entry.add_update_listener(options_updated) # Schedule directly on the loop to avoid blocking HA startup. 
hass.loop.create_task(gateway.connect_and_subscribe()) for comp in [COMP_BINARY_SENSOR, COMP_CLIMATE, COMP_SENSOR]: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, comp) ) register_services(hass) return True async def async_setup(hass, config): """Set up the OpenTherm Gateway component.""" if not hass.config_entries.async_entries(DOMAIN) and DOMAIN in config: conf = config[DOMAIN] for device_id, device_config in conf.items(): device_config[CONF_ID] = device_id hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=device_config ) ) return True def register_services(hass): """Register services for the component.""" service_reset_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ) } ) service_set_clock_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Optional(ATTR_DATE, default=date.today()): cv.date, vol.Optional(ATTR_TIME, default=datetime.now().time()): cv.time, } ) service_set_control_setpoint_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=0, max=90) ), } ) service_set_hot_water_setpoint_schema = service_set_control_setpoint_schema service_set_hot_water_ovrd_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_DHW_OVRD): vol.Any( vol.Equal("A"), vol.All(vol.Coerce(int), vol.Range(min=0, max=1)) ), } ) service_set_gpio_mode_schema = vol.Schema( vol.Any( vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.Equal("A"), vol.Required(ATTR_MODE): vol.All( vol.Coerce(int), vol.Range(min=0, max=6) ), } ), vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.Equal("B"), vol.Required(ATTR_MODE): vol.All( vol.Coerce(int), vol.Range(min=0, max=7) ), } ), ) ) service_set_led_mode_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_ID): vol.In("ABCDEF"), vol.Required(ATTR_MODE): vol.In("RXTBOFHWCEMP"), } ) service_set_max_mod_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_LEVEL): vol.All( vol.Coerce(int), vol.Range(min=-1, max=100) ), } ) service_set_oat_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=-40, max=99) ), } ) service_set_sb_temp_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS]) ), vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=0, max=30) ), } ) async def reset_gateway(call): """Reset the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] mode_rst = gw_vars.OTGW_MODE_RESET status = await gw_dev.gateway.set_mode(mode_rst) gw_dev.status = status async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_RESET_GATEWAY, 
reset_gateway, service_reset_schema ) async def set_control_setpoint(call): """Set the control setpoint on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_CONTROL_SETPOINT value = await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_CONTROL_SETPOINT, set_control_setpoint, service_set_control_setpoint_schema, ) async def set_dhw_ovrd(call): """Set the domestic hot water override on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.OTGW_DHW_OVRD value = await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_HOT_WATER_OVRD, set_dhw_ovrd, service_set_hot_water_ovrd_schema, ) async def set_dhw_setpoint(call): """Set the domestic hot water setpoint on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_DHW_SETPOINT value = await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_HOT_WATER_SETPOINT, set_dhw_setpoint, service_set_hot_water_setpoint_schema, ) async def set_device_clock(call): """Set the clock on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] attr_date = call.data[ATTR_DATE] attr_time = call.data[ATTR_TIME] await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time)) hass.services.async_register( DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema ) async def set_gpio_mode(call): """Set the OpenTherm Gateway GPIO modes.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gpio_id = call.data[ATTR_ID] gpio_mode = call.data[ATTR_MODE] mode = await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode) gpio_var = getattr(gw_vars, f"OTGW_GPIO_{gpio_id}") gw_dev.status.update({gpio_var: mode}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema ) async def set_led_mode(call): """Set the OpenTherm Gateway LED modes.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] led_id = call.data[ATTR_ID] led_mode = call.data[ATTR_MODE] mode = await gw_dev.gateway.set_led_mode(led_id, led_mode) led_var = getattr(gw_vars, f"OTGW_LED_{led_id}") gw_dev.status.update({led_var: mode}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_LED_MODE, set_led_mode, service_set_led_mode_schema ) async def set_max_mod(call): """Set the max modulation level.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD level = call.data[ATTR_LEVEL] if level == -1: # Backend only clears setting on non-numeric values. 
level = "-" value = await gw_dev.gateway.set_max_relative_mod(level) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema ) async def set_outside_temp(call): """Provide the outside temperature to the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.DATA_OUTSIDE_TEMP value = await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema ) async def set_setback_temp(call): """Set the OpenTherm Gateway SetBack temperature.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gw_var = gw_vars.OTGW_SB_TEMP value = await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) gw_dev.status.update({gw_var: value}) async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) hass.services.async_register( DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema ) async def async_unload_entry(hass, entry): """Cleanup and disconnect from gateway.""" await asyncio.gather( hass.config_entries.async_forward_entry_unload(entry, COMP_BINARY_SENSOR), hass.config_entries.async_forward_entry_unload(entry, COMP_CLIMATE), hass.config_entries.async_forward_entry_unload(entry, COMP_SENSOR), ) gateway = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][entry.data[CONF_ID]] await gateway.cleanup() return True class OpenThermGatewayDevice: """OpenTherm Gateway device class.""" def __init__(self, hass, config_entry): """Initialize the OpenTherm Gateway.""" self.hass = hass self.device_path = config_entry.data[CONF_DEVICE] self.gw_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] self.climate_config = config_entry.options self.status = {} self.update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_update" self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_options_update" self.gateway = pyotgw.pyotgw() self.gw_version = None async def cleanup(self, event=None): """Reset overrides on the gateway.""" await self.gateway.set_control_setpoint(0) await self.gateway.set_max_relative_mod("-") await self.gateway.disconnect() async def connect_and_subscribe(self): """Connect to serial device and subscribe report handler.""" self.status = await self.gateway.connect(self.hass.loop, self.device_path) _LOGGER.debug("Connected to OpenTherm Gateway at %s", self.device_path) self.gw_version = self.status.get(gw_vars.OTGW_BUILD) self.hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.cleanup) async def handle_report(status): """Handle reports from the OpenTherm Gateway.""" _LOGGER.debug("Received report: %s", status) self.status = status async_dispatcher_send(self.hass, self.update_signal, status) self.gateway.subscribe(handle_report)
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/opentherm_gw/__init__.py
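The GPIO service schema above is a vol.Any over two complete schemas because port A accepts modes 0-6 while port B accepts 0-7. The same branching validation in a standalone sketch (keys shortened; the real schema also carries the gateway id):

import voluptuous as vol

SET_GPIO = vol.Any(
    vol.Schema({"id": "A", "mode": vol.All(int, vol.Range(min=0, max=6))}),
    vol.Schema({"id": "B", "mode": vol.All(int, vol.Range(min=0, max=7))}),
)

print(SET_GPIO({"id": "B", "mode": 7}))  # valid: port B allows 0-7
# SET_GPIO({"id": "A", "mode": 7}) raises: port A only allows 0-6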
"""The broadlink component.""" import asyncio from base64 import b64decode, b64encode from binascii import unhexlify import logging import re from broadlink.exceptions import BroadlinkException, ReadError, StorageError import voluptuous as vol from homeassistant.const import CONF_HOST import homeassistant.helpers.config_validation as cv from homeassistant.util.dt import utcnow from .const import CONF_PACKET, DOMAIN, LEARNING_TIMEOUT, SERVICE_LEARN, SERVICE_SEND _LOGGER = logging.getLogger(__name__) DEFAULT_RETRY = 3 def data_packet(value): """Decode a data packet given for broadlink.""" value = cv.string(value) extra = len(value) % 4 if extra > 0: value = value + ("=" * (4 - extra)) return b64decode(value) def hostname(value): """Validate a hostname.""" host = str(value) if len(host) > 253: raise ValueError if host[-1] == ".": host = host[:-1] allowed = re.compile(r"(?![_-])[a-z\d_-]{1,63}(?<![_-])$", flags=re.IGNORECASE) if not all(allowed.match(elem) for elem in host.split(".")): raise ValueError return host def mac_address(value): """Validate and coerce a 48-bit MAC address.""" mac = str(value).lower() if len(mac) == 17: mac = mac[0:2] + mac[3:5] + mac[6:8] + mac[9:11] + mac[12:14] + mac[15:17] elif len(mac) == 14: mac = mac[0:2] + mac[2:4] + mac[5:7] + mac[7:9] + mac[10:12] + mac[12:14] elif len(mac) != 12: raise ValueError return unhexlify(mac) SERVICE_SEND_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PACKET): vol.All(cv.ensure_list, [data_packet]), } ) SERVICE_LEARN_SCHEMA = vol.Schema({vol.Required(CONF_HOST): cv.string}) async def async_setup_service(hass, host, device): """Register a device for given host for use in services.""" hass.data.setdefault(DOMAIN, {})[host] = device if hass.services.has_service(DOMAIN, SERVICE_LEARN): return async def async_learn_command(call): """Learn a packet from remote.""" device = hass.data[DOMAIN][call.data[CONF_HOST]] try: await device.async_request(device.api.enter_learning) except BroadlinkException as err_msg: _LOGGER.error("Failed to enter learning mode: %s", err_msg) return _LOGGER.info("Press the key you want Home Assistant to learn") start_time = utcnow() while (utcnow() - start_time) < LEARNING_TIMEOUT: await asyncio.sleep(1) try: packet = await device.async_request(device.api.check_data) except (ReadError, StorageError): continue except BroadlinkException as err_msg: _LOGGER.error("Failed to learn: %s", err_msg) return else: data = b64encode(packet).decode("utf8") log_msg = f"Received packet is: {data}" _LOGGER.info(log_msg) hass.components.persistent_notification.async_create( log_msg, title="Broadlink switch" ) return _LOGGER.error("Failed to learn: No signal received") hass.components.persistent_notification.async_create( "No signal was received", title="Broadlink switch" ) hass.services.async_register( DOMAIN, SERVICE_LEARN, async_learn_command, schema=SERVICE_LEARN_SCHEMA ) async def async_send_packet(call): """Send a packet.""" device = hass.data[DOMAIN][call.data[CONF_HOST]] packets = call.data[CONF_PACKET] for packet in packets: try: await device.async_request(device.api.send_data, packet) except BroadlinkException as err_msg: _LOGGER.error("Failed to send packet: %s", err_msg) return hass.services.async_register( DOMAIN, SERVICE_SEND, async_send_packet, schema=SERVICE_SEND_SCHEMA )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/broadlink/__init__.py
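data_packet above re-adds the "=" padding that base64 requires before decoding, since packets are often stored with the padding stripped. The padding step in isolation (the sample packet string is made up):

from base64 import b64decode

def pad_and_decode(value: str) -> bytes:
    # Base64 input length must be a multiple of 4; restore stripped '='.
    extra = len(value) % 4
    if extra:
        value += "=" * (4 - extra)
    return b64decode(value)

print(pad_and_decode("JgBGAJKT").hex())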
"""Support for a Genius Hub system.""" from datetime import timedelta import logging from typing import Any, Dict, Optional import aiohttp from geniushubclient import GeniusHub import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) DOMAIN = "geniushub" # temperature is repeated here, as it gives access to high-precision temps GH_ZONE_ATTRS = ["mode", "temperature", "type", "occupied", "override"] GH_DEVICE_ATTRS = { "luminance": "luminance", "measuredTemperature": "measured_temperature", "occupancyTrigger": "occupancy_trigger", "setback": "setback", "setTemperature": "set_temperature", "wakeupInterval": "wakeup_interval", } SCAN_INTERVAL = timedelta(seconds=60) MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$" V1_API_SCHEMA = vol.Schema( { vol.Required(CONF_TOKEN): cv.string, vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), } ) V3_API_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), } ) CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Any(V3_API_SCHEMA, V1_API_SCHEMA)}, extra=vol.ALLOW_EXTRA ) ATTR_ZONE_MODE = "mode" ATTR_DURATION = "duration" SVC_SET_ZONE_MODE = "set_zone_mode" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SET_ZONE_MODE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_MODE): vol.In(["off", "timer", "footprint"]), } ) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_TEMPERATURE): vol.All( vol.Coerce(float), vol.Range(min=4, max=28) ), vol.Optional(ATTR_DURATION): vol.All( cv.time_period, vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)), ), } ) async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a Genius Hub system.""" hass.data[DOMAIN] = {} kwargs = dict(config[DOMAIN]) if CONF_HOST in kwargs: args = (kwargs.pop(CONF_HOST),) else: args = (kwargs.pop(CONF_TOKEN),) hub_uid = kwargs.pop(CONF_MAC, None) client = GeniusHub(*args, **kwargs, session=async_get_clientsession(hass)) broker = hass.data[DOMAIN]["broker"] = GeniusBroker(hass, client, hub_uid) try: await client.update() except aiohttp.ClientResponseError as err: _LOGGER.error("Setup failed, check your configuration, %s", err) return False broker.make_debug_log_entries() async_track_time_interval(hass, broker.async_update, SCAN_INTERVAL) for platform in ["climate", "water_heater", "sensor", "binary_sensor", "switch"]: hass.async_create_task(async_load_platform(hass, platform, DOMAIN, {}, config)) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service functions.""" 
@verify_domain_control(hass, DOMAIN) async def set_zone_mode(call) -> None: """Set the system mode.""" entity_id = call.data[ATTR_ENTITY_ID] registry = await hass.helpers.entity_registry.async_get_registry() registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register( DOMAIN, SVC_SET_ZONE_MODE, set_zone_mode, schema=SET_ZONE_MODE_SCHEMA ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_mode, schema=SET_ZONE_OVERRIDE_SCHEMA ) class GeniusBroker: """Container for geniushub client and data.""" def __init__(self, hass, client, hub_uid) -> None: """Initialize the geniushub client.""" self.hass = hass self.client = client self._hub_uid = hub_uid self._connect_error = False @property def hub_uid(self) -> int: """Return the Hub UID (MAC address).""" # pylint: disable=no-member return self._hub_uid if self._hub_uid is not None else self.client.uid async def async_update(self, now, **kwargs) -> None: """Update the geniushub client's data.""" try: await self.client.update() if self._connect_error: self._connect_error = False _LOGGER.warning("Connection to geniushub re-established") except ( aiohttp.ClientResponseError, aiohttp.client_exceptions.ClientConnectorError, ) as err: if not self._connect_error: self._connect_error = True _LOGGER.warning( "Connection to geniushub failed (unable to update), message is: %s", err, ) return self.make_debug_log_entries() async_dispatcher_send(self.hass, DOMAIN) def make_debug_log_entries(self) -> None: """Make any useful debug log entries.""" # pylint: disable=protected-access _LOGGER.debug( "Raw JSON: \n\nclient._zones = %s \n\nclient._devices = %s", self.client._zones, self.client._devices, ) class GeniusEntity(Entity): """Base for all Genius Hub entities.""" def __init__(self) -> None: """Initialize the entity.""" self._unique_id = self._name = None async def async_added_to_hass(self) -> None: """Set up a listener when this entity is added to HA.""" self.async_on_remove(async_dispatcher_connect(self.hass, DOMAIN, self._refresh)) async def _refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" self.async_schedule_update_ha_state(force_refresh=True) @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name of the geniushub entity.""" return self._name @property def should_poll(self) -> bool: """Return False as geniushub entities should not be polled.""" return False class GeniusDevice(GeniusEntity): """Base for all Genius Hub devices.""" def __init__(self, broker, device) -> None: """Initialize the Device.""" super().__init__() self._device = device self._unique_id = f"{broker.hub_uid}_device_{device.id}" self._last_comms = self._state_attr = None @property def device_state_attributes(self) -> Dict[str, Any]: """Return the device state attributes.""" attrs = {} attrs["assigned_zone"] = self._device.data["assignedZones"][0]["name"] if self._last_comms: attrs["last_comms"] = self._last_comms.isoformat() state = dict(self._device.data["state"]) if "_state" in self._device.data: # only via v3 API state.update(self._device.data["_state"]) 
attrs["state"] = { GH_DEVICE_ATTRS[k]: v for k, v in state.items() if k in GH_DEVICE_ATTRS } return attrs async def async_update(self) -> None: """Update an entity's state data.""" if "_state" in self._device.data: # only via v3 API self._last_comms = dt_util.utc_from_timestamp( self._device.data["_state"]["lastComms"] ) class GeniusZone(GeniusEntity): """Base for all Genius Hub zones.""" def __init__(self, broker, zone) -> None: """Initialize the Zone.""" super().__init__() self._zone = zone self._unique_id = f"{broker.hub_uid}_zone_{zone.id}" async def _refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] == SVC_SET_ZONE_OVERRIDE: temperature = round(payload["data"][ATTR_TEMPERATURE] * 10) / 10 duration = payload["data"].get(ATTR_DURATION, timedelta(hours=1)) await self._zone.set_override(temperature, int(duration.total_seconds())) return mode = payload["data"][ATTR_ZONE_MODE] # pylint: disable=protected-access if mode == "footprint" and not self._zone._has_pir: raise TypeError( f"'{self.entity_id}' can not support footprint mode (it has no PIR)" ) await self._zone.set_mode(mode) @property def name(self) -> str: """Return the name of the climate device.""" return self._zone.name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the device state attributes.""" status = {k: v for k, v in self._zone.data.items() if k in GH_ZONE_ATTRS} return {"status": status} class GeniusHeatingZone(GeniusZone): """Base for Genius Heating Zones.""" def __init__(self, broker, zone) -> None: """Initialize the Zone.""" super().__init__(broker, zone) self._max_temp = self._min_temp = self._supported_features = None @property def current_temperature(self) -> Optional[float]: """Return the current temperature.""" return self._zone.data.get("temperature") @property def target_temperature(self) -> float: """Return the temperature we try to reach.""" return self._zone.data["setpoint"] @property def min_temp(self) -> float: """Return max valid temperature that can be set.""" return self._min_temp @property def max_temp(self) -> float: """Return max valid temperature that can be set.""" return self._max_temp @property def temperature_unit(self) -> str: """Return the unit of measurement.""" return TEMP_CELSIUS @property def supported_features(self) -> int: """Return the bitmask of supported features.""" return self._supported_features async def async_set_temperature(self, **kwargs) -> None: """Set a new target temperature for this zone.""" await self._zone.set_override( kwargs[ATTR_TEMPERATURE], kwargs.get(ATTR_DURATION, 3600) )
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/geniushub/__init__.py
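One detail worth noting in the schemas above: MAC_ADDRESS_REGEXP is matched without re.IGNORECASE, so only upper-case MAC addresses pass vol.Match. A quick standalone check:

import re

MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$"

print(bool(re.match(MAC_ADDRESS_REGEXP, "AA:BB:CC:DD:EE:FF")))  # True
print(bool(re.match(MAC_ADDRESS_REGEXP, "aa:bb:cc:dd:ee:ff")))  # False: lower-case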
"""Offer webhook triggered automation rules.""" from functools import partial import logging from aiohttp import hdrs import voluptuous as vol from homeassistant.const import CONF_PLATFORM, CONF_WEBHOOK_ID from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from . import DOMAIN as AUTOMATION_DOMAIN # mypy: allow-untyped-defs DEPENDENCIES = ("webhook",) _LOGGER = logging.getLogger(__name__) TRIGGER_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): "webhook", vol.Required(CONF_WEBHOOK_ID): cv.string} ) async def _handle_webhook(action, hass, webhook_id, request): """Handle incoming webhook.""" result = {"platform": "webhook", "webhook_id": webhook_id} if "json" in request.headers.get(hdrs.CONTENT_TYPE, ""): result["json"] = await request.json() else: result["data"] = await request.post() result["query"] = request.query hass.async_run_job(action, {"trigger": result}) async def async_attach_trigger(hass, config, action, automation_info): """Trigger based on incoming webhooks.""" webhook_id = config.get(CONF_WEBHOOK_ID) hass.components.webhook.async_register( AUTOMATION_DOMAIN, automation_info["name"], webhook_id, partial(_handle_webhook, action), ) @callback def unregister(): """Unregister webhook.""" hass.components.webhook.async_unregister(webhook_id) return unregister
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/automation/webhook.py
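For JSON requests, _handle_webhook above decodes the body into result["json"]; otherwise it stores the posted form data. Roughly the trigger variable handed to the action for a JSON POST, as a hand-written illustration rather than captured output:

# Illustrative shape of the data _handle_webhook passes to the action
# when the request carries a JSON content type (values are made up).
trigger_data = {
    "trigger": {
        "platform": "webhook",
        "webhook_id": "my_webhook_id",
        "json": {"pressed": True},
        "query": {},  # in practice a MultiDict of URL query parameters
    }
}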
"""Support for the light on the Sisyphus Kinetic Art Table.""" import logging import aiohttp from homeassistant.components.light import SUPPORT_BRIGHTNESS, LightEntity from homeassistant.const import CONF_HOST from homeassistant.exceptions import PlatformNotReady from . import DATA_SISYPHUS _LOGGER = logging.getLogger(__name__) SUPPORTED_FEATURES = SUPPORT_BRIGHTNESS async def async_setup_platform(hass, config, add_entities, discovery_info=None): """Set up a single Sisyphus table.""" host = discovery_info[CONF_HOST] try: table_holder = hass.data[DATA_SISYPHUS][host] table = await table_holder.get_table() except aiohttp.ClientError: raise PlatformNotReady() add_entities([SisyphusLight(table_holder.name, table)], update_before_add=True) class SisyphusLight(LightEntity): """Representation of a Sisyphus table as a light.""" def __init__(self, name, table): """Initialize the Sisyphus table.""" self._name = name self._table = table async def async_added_to_hass(self): """Add listeners after this object has been initialized.""" self._table.add_listener(self.async_write_ha_state) @property def available(self): """Return true if the table is responding to heartbeats.""" return self._table.is_connected @property def unique_id(self): """Return the UUID of the table.""" return self._table.id @property def name(self): """Return the ame of the table.""" return self._name @property def is_on(self): """Return True if the table is on.""" return not self._table.is_sleeping @property def brightness(self): """Return the current brightness of the table's ring light.""" return self._table.brightness * 255 @property def supported_features(self): """Return the features supported by the table; i.e. brightness.""" return SUPPORTED_FEATURES async def async_turn_off(self, **kwargs): """Put the table to sleep.""" await self._table.sleep() _LOGGER.debug("Sisyphus table %s: sleep") async def async_turn_on(self, **kwargs): """Wake up the table if necessary, optionally changes brightness.""" if not self.is_on: await self._table.wakeup() _LOGGER.debug("Sisyphus table %s: wakeup") if "brightness" in kwargs: await self._table.set_brightness(kwargs["brightness"] / 255.0)
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/sisyphus/light.py
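The brightness property above multiplies the table's 0.0-1.0 level by 255, and async_turn_on divides Home Assistant's 0-255 value back down. A standalone sketch of that round trip (helper names are made up; the component itself returns the raw product without rounding):

def table_to_ha(brightness: float) -> int:
    # Table reports 0.0-1.0; HA's brightness attribute is 0-255.
    return round(brightness * 255)

def ha_to_table(brightness: int) -> float:
    return brightness / 255.0

assert table_to_ha(ha_to_table(128)) == 128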
"""Support for system log.""" from collections import OrderedDict, deque import logging import re import traceback import voluptuous as vol from homeassistant import __path__ as HOMEASSISTANT_PATH from homeassistant.components.http import HomeAssistantView from homeassistant.const import EVENT_HOMEASSISTANT_STOP import homeassistant.helpers.config_validation as cv CONF_MAX_ENTRIES = "max_entries" CONF_FIRE_EVENT = "fire_event" CONF_MESSAGE = "message" CONF_LEVEL = "level" CONF_LOGGER = "logger" DATA_SYSTEM_LOG = "system_log" DEFAULT_MAX_ENTRIES = 50 DEFAULT_FIRE_EVENT = False DOMAIN = "system_log" EVENT_SYSTEM_LOG = "system_log_event" SERVICE_CLEAR = "clear" SERVICE_WRITE = "write" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional( CONF_MAX_ENTRIES, default=DEFAULT_MAX_ENTRIES ): cv.positive_int, vol.Optional(CONF_FIRE_EVENT, default=DEFAULT_FIRE_EVENT): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) SERVICE_CLEAR_SCHEMA = vol.Schema({}) SERVICE_WRITE_SCHEMA = vol.Schema( { vol.Required(CONF_MESSAGE): cv.string, vol.Optional(CONF_LEVEL, default="error"): vol.In( ["debug", "info", "warning", "error", "critical"] ), vol.Optional(CONF_LOGGER): cv.string, } ) def _figure_out_source(record, call_stack, hass): paths = [HOMEASSISTANT_PATH[0], hass.config.config_dir] # If a stack trace exists, extract file names from the entire call stack. # The other case is when a regular "log" is made (without an attached # exception). In that case, just use the file where the log was made from. if record.exc_info: stack = [(x[0], x[1]) for x in traceback.extract_tb(record.exc_info[2])] else: index = -1 for i, frame in enumerate(call_stack): if frame[0] == record.pathname: index = i break if index == -1: # For some reason we couldn't find pathname in the stack. stack = [(record.pathname, record.lineno)] else: stack = call_stack[0 : index + 1] # Iterate through the stack call (in reverse) and find the last call from # a file in Home Assistant. Try to figure out where error happened. 
paths_re = r"(?:{})/(.*)".format("|".join([re.escape(x) for x in paths])) for pathname in reversed(stack): # Try to match with a file within Home Assistant match = re.match(paths_re, pathname[0]) if match: return [match.group(1), pathname[1]] # Ok, we don't know what this is return (record.pathname, record.lineno) class LogEntry: """Store HA log entries.""" def __init__(self, record, stack, source): """Initialize a log entry.""" self.first_occurred = self.timestamp = record.created self.name = record.name self.level = record.levelname self.message = deque([record.getMessage()], maxlen=5) self.exception = "" self.root_cause = None if record.exc_info: self.exception = "".join(traceback.format_exception(*record.exc_info)) _, _, tb = record.exc_info # pylint: disable=invalid-name # Last line of traceback contains the root cause of the exception if traceback.extract_tb(tb): self.root_cause = str(traceback.extract_tb(tb)[-1]) self.source = source self.count = 1 self.hash = str([self.name, *self.source, self.root_cause]) def to_dict(self): """Convert object into dict to maintain backward compatibility.""" return { "name": self.name, "message": list(self.message), "level": self.level, "source": self.source, "timestamp": self.timestamp, "exception": self.exception, "count": self.count, "first_occurred": self.first_occurred, } class DedupStore(OrderedDict): """Data store to hold max amount of deduped entries.""" def __init__(self, maxlen=50): """Initialize a new DedupStore.""" super().__init__() self.maxlen = maxlen def add_entry(self, entry): """Add a new entry.""" key = entry.hash if key in self: # Update stored entry existing = self[key] existing.count += 1 existing.timestamp = entry.timestamp if entry.message[0] not in existing.message: existing.message.append(entry.message[0]) self.move_to_end(key) else: self[key] = entry if len(self) > self.maxlen: # Removes the first record which should also be the oldest self.popitem(last=False) def to_list(self): """Return reversed list of log entries - LIFO.""" return [value.to_dict() for value in reversed(self.values())] class LogErrorHandler(logging.Handler): """Log handler for error messages.""" def __init__(self, hass, maxlen, fire_event): """Initialize a new LogErrorHandler.""" super().__init__() self.hass = hass self.records = DedupStore(maxlen=maxlen) self.fire_event = fire_event def emit(self, record): """Save error and warning logs. Everything logged with error or warning is saved in local buffer. A default upper limit is set to 50 (older entries are discarded) but can be changed if needed. 
""" if record.levelno >= logging.WARN: stack = [] if not record.exc_info: stack = [(f[0], f[1]) for f in traceback.extract_stack()] entry = LogEntry( record, stack, _figure_out_source(record, stack, self.hass) ) self.records.add_entry(entry) if self.fire_event: self.hass.bus.fire(EVENT_SYSTEM_LOG, entry.to_dict()) async def async_setup(hass, config): """Set up the logger component.""" conf = config.get(DOMAIN) if conf is None: conf = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN] handler = LogErrorHandler(hass, conf[CONF_MAX_ENTRIES], conf[CONF_FIRE_EVENT]) logging.getLogger().addHandler(handler) hass.http.register_view(AllErrorsView(handler)) async def async_service_handler(service): """Handle logger services.""" if service.service == "clear": handler.records.clear() return if service.service == "write": logger = logging.getLogger( service.data.get(CONF_LOGGER, f"{__name__}.external") ) level = service.data[CONF_LEVEL] getattr(logger, level)(service.data[CONF_MESSAGE]) async def async_shutdown_handler(event): """Remove logging handler when Home Assistant is shutdown.""" # This is needed as older logger instances will remain logging.getLogger().removeHandler(handler) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown_handler) hass.services.async_register( DOMAIN, SERVICE_CLEAR, async_service_handler, schema=SERVICE_CLEAR_SCHEMA ) hass.services.async_register( DOMAIN, SERVICE_WRITE, async_service_handler, schema=SERVICE_WRITE_SCHEMA ) return True class AllErrorsView(HomeAssistantView): """Get all logged errors and warnings.""" url = "/api/error/all" name = "api:error:all" def __init__(self, handler): """Initialize a new AllErrorsView.""" self.handler = handler async def get(self, request): """Get all errors and warnings.""" return self.json(self.handler.records.to_list())
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/system_log/__init__.py
"""Component for the Somfy MyLink device supporting the Synergy API.""" import logging from somfy_mylink_synergy import SomfyMyLinkSynergy import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers import config_validation as cv from homeassistant.helpers.discovery import async_load_platform _LOGGER = logging.getLogger(__name__) CONF_ENTITY_CONFIG = "entity_config" CONF_SYSTEM_ID = "system_id" CONF_REVERSE = "reverse" CONF_DEFAULT_REVERSE = "default_reverse" DATA_SOMFY_MYLINK = "somfy_mylink_data" DOMAIN = "somfy_mylink" SOMFY_MYLINK_COMPONENTS = ["cover"] def validate_entity_config(values): """Validate config entry for CONF_ENTITY.""" entity_config_schema = vol.Schema({vol.Optional(CONF_REVERSE): cv.boolean}) if not isinstance(values, dict): raise vol.Invalid("expected a dictionary") entities = {} for entity_id, config in values.items(): entity = cv.entity_id(entity_id) config = entity_config_schema(config) entities[entity] = config return entities CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_SYSTEM_ID): cv.string, vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=44100): cv.port, vol.Optional(CONF_DEFAULT_REVERSE, default=False): cv.boolean, vol.Optional(CONF_ENTITY_CONFIG, default={}): validate_entity_config, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the MyLink platform.""" host = config[DOMAIN][CONF_HOST] port = config[DOMAIN][CONF_PORT] system_id = config[DOMAIN][CONF_SYSTEM_ID] entity_config = config[DOMAIN][CONF_ENTITY_CONFIG] entity_config[CONF_DEFAULT_REVERSE] = config[DOMAIN][CONF_DEFAULT_REVERSE] somfy_mylink = SomfyMyLinkSynergy(system_id, host, port) hass.data[DATA_SOMFY_MYLINK] = somfy_mylink for component in SOMFY_MYLINK_COMPONENTS: hass.async_create_task( async_load_platform(hass, component, DOMAIN, entity_config, config) ) return True
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/somfy_mylink/__init__.py
"""Support for the Rainforest Eagle-200 energy monitor.""" from datetime import timedelta import logging from eagle200_reader import EagleReader from requests.exceptions import ConnectionError as ConnectError, HTTPError, Timeout from uEagle import Eagle as LegacyReader import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_IP_ADDRESS, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle CONF_CLOUD_ID = "cloud_id" CONF_INSTALL_CODE = "install_code" POWER_KILO_WATT = "kW" _LOGGER = logging.getLogger(__name__) MIN_SCAN_INTERVAL = timedelta(seconds=30) SENSORS = { "instantanous_demand": ("Eagle-200 Meter Power Demand", POWER_KILO_WATT), "summation_delivered": ( "Eagle-200 Total Meter Energy Delivered", ENERGY_KILO_WATT_HOUR, ), "summation_received": ( "Eagle-200 Total Meter Energy Received", ENERGY_KILO_WATT_HOUR, ), "summation_total": ( "Eagle-200 Net Meter Energy (Delivered minus Received)", ENERGY_KILO_WATT_HOUR, ), } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Required(CONF_CLOUD_ID): cv.string, vol.Required(CONF_INSTALL_CODE): cv.string, } ) def hwtest(cloud_id, install_code, ip_address): """Try API call 'get_network_info' to see if target device is Legacy or Eagle-200.""" reader = LeagleReader(cloud_id, install_code, ip_address) response = reader.get_network_info() # Branch to test if target is Legacy Model if "NetworkInfo" in response: if response["NetworkInfo"].get("ModelId", None) == "Z109-EAGLE": return reader # Branch to test if target is Eagle-200 Model if "Response" in response: if response["Response"].get("Command", None) == "get_network_info": return EagleReader(ip_address, cloud_id, install_code) # Catch-all if hardware ID tests fail raise ValueError("Couldn't determine device model.") def setup_platform(hass, config, add_entities, discovery_info=None): """Create the Eagle-200 sensor.""" ip_address = config[CONF_IP_ADDRESS] cloud_id = config[CONF_CLOUD_ID] install_code = config[CONF_INSTALL_CODE] try: eagle_reader = hwtest(cloud_id, install_code, ip_address) except (ConnectError, HTTPError, Timeout, ValueError) as error: _LOGGER.error("Failed to connect during setup: %s", error) return eagle_data = EagleData(eagle_reader) eagle_data.update() monitored_conditions = list(SENSORS) sensors = [] for condition in monitored_conditions: sensors.append( EagleSensor( eagle_data, condition, SENSORS[condition][0], SENSORS[condition][1] ) ) add_entities(sensors) class EagleSensor(Entity): """Implementation of the Rainforest Eagle-200 sensor.""" def __init__(self, eagle_data, sensor_type, name, unit): """Initialize the sensor.""" self.eagle_data = eagle_data self._type = sensor_type self._name = name self._unit_of_measurement = unit self._state = None @property def device_class(self): """Return the power device class for the instantanous_demand sensor.""" if self._type == "instantanous_demand": return DEVICE_CLASS_POWER return None @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement.""" return self._unit_of_measurement def update(self): """Get the energy information from the Rainforest Eagle.""" self.eagle_data.update() self._state = 
self.eagle_data.get_state(self._type) class EagleData: """Get the latest data from the Eagle-200 device.""" def __init__(self, eagle_reader): """Initialize the data object.""" self._eagle_reader = eagle_reader self.data = {} @Throttle(MIN_SCAN_INTERVAL) def update(self): """Get the latest data from the Eagle-200 device.""" try: self.data = self._eagle_reader.update() _LOGGER.debug("API data: %s", self.data) except (ConnectError, HTTPError, Timeout, ValueError) as error: _LOGGER.error("Unable to connect during update: %s", error) self.data = {} def get_state(self, sensor_type): """Get the sensor value from the dictionary.""" state = self.data.get(sensor_type) _LOGGER.debug("Updating: %s - %s", sensor_type, state) return state class LeagleReader(LegacyReader): """Wraps uEagle to make it behave like eagle_reader, offering update().""" def update(self): """Fetch and return the four sensor values in a dict.""" out = {} resp = self.get_instantaneous_demand()["InstantaneousDemand"] out["instantanous_demand"] = resp["Demand"] resp = self.get_current_summation()["CurrentSummation"] out["summation_delivered"] = resp["SummationDelivered"] out["summation_received"] = resp["SummationReceived"] out["summation_total"] = out["summation_delivered"] - out["summation_received"] return out
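EagleData.update above is rate-limited with homeassistant.util.Throttle, so a burst of entity updates results in at most one device API call per interval. The following is a simplified, standalone stand-in for that decorator, not Home Assistant's implementation; the 30-second window mirrors MIN_SCAN_INTERVAL, and the update function is invented.

# Simplified stand-in for homeassistant.util.Throttle: calls inside the
# window become no-ops that return None.
import time

def throttle(interval_seconds):
    def decorator(func):
        last_called = None

        def wrapper(*args, **kwargs):
            nonlocal last_called
            now = time.monotonic()
            if last_called is not None and now - last_called < interval_seconds:
                return None  # throttled: skip the expensive call
            last_called = now
            return func(*args, **kwargs)

        return wrapper

    return decorator

@throttle(30)
def update():
    print("hitting the device API")
    return {"summation_delivered": 123.4}

print(update())  # runs: prints and returns data
print(update())  # within the 30 s window: returns None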
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/rainforest_eagle/sensor.py
"""Handle MySensors devices.""" from functools import partial import logging from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_OFF, STATE_ON from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity from .const import CHILD_CALLBACK, NODE_CALLBACK, UPDATE_DELAY _LOGGER = logging.getLogger(__name__) ATTR_CHILD_ID = "child_id" ATTR_DESCRIPTION = "description" ATTR_DEVICE = "device" ATTR_NODE_ID = "node_id" ATTR_HEARTBEAT = "heartbeat" MYSENSORS_PLATFORM_DEVICES = "mysensors_devices_{}" def get_mysensors_devices(hass, domain): """Return MySensors devices for a platform.""" if MYSENSORS_PLATFORM_DEVICES.format(domain) not in hass.data: hass.data[MYSENSORS_PLATFORM_DEVICES.format(domain)] = {} return hass.data[MYSENSORS_PLATFORM_DEVICES.format(domain)] class MySensorsDevice: """Representation of a MySensors device.""" def __init__(self, gateway, node_id, child_id, name, value_type): """Set up the MySensors device.""" self.gateway = gateway self.node_id = node_id self.child_id = child_id self._name = name self.value_type = value_type child = gateway.sensors[node_id].children[child_id] self.child_type = child.type self._values = {} self._update_scheduled = False self.hass = None @property def name(self): """Return the name of this entity.""" return self._name @property def device_state_attributes(self): """Return device specific state attributes.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] attr = { ATTR_BATTERY_LEVEL: node.battery_level, ATTR_HEARTBEAT: node.heartbeat, ATTR_CHILD_ID: self.child_id, ATTR_DESCRIPTION: child.description, ATTR_DEVICE: self.gateway.device, ATTR_NODE_ID: self.node_id, } set_req = self.gateway.const.SetReq for value_type, value in self._values.items(): attr[set_req(value_type).name] = value return attr async def async_update(self): """Update the controller with the latest value from a sensor.""" node = self.gateway.sensors[self.node_id] child = node.children[self.child_id] set_req = self.gateway.const.SetReq for value_type, value in child.values.items(): _LOGGER.debug( "Entity update: %s: value_type %s, value = %s", self._name, value_type, value, ) if value_type in ( set_req.V_ARMED, set_req.V_LIGHT, set_req.V_LOCK_STATUS, set_req.V_TRIPPED, ): self._values[value_type] = STATE_ON if int(value) == 1 else STATE_OFF elif value_type == set_req.V_DIMMER: self._values[value_type] = int(value) else: self._values[value_type] = value async def _async_update_callback(self): """Update the device.""" raise NotImplementedError @callback def async_update_callback(self): """Update the device after delay.""" if self._update_scheduled: return async def update(): """Perform update.""" try: await self._async_update_callback() except Exception: # pylint: disable=broad-except _LOGGER.exception("Error updating %s", self.name) finally: self._update_scheduled = False self._update_scheduled = True delayed_update = partial(self.hass.async_create_task, update()) self.hass.loop.call_later(UPDATE_DELAY, delayed_update) class MySensorsEntity(MySensorsDevice, Entity): """Representation of a MySensors entity.""" @property def should_poll(self): """Return the polling state. 
The gateway pushes its states.""" return False @property def available(self): """Return true if entity is available.""" return self.value_type in self._values async def _async_update_callback(self): """Update the entity.""" await self.async_update_ha_state(True) async def async_added_to_hass(self): """Register update callback.""" gateway_id = id(self.gateway) dev_id = gateway_id, self.node_id, self.child_id, self.value_type self.async_on_remove( async_dispatcher_connect( self.hass, CHILD_CALLBACK.format(*dev_id), self.async_update_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, NODE_CALLBACK.format(gateway_id, self.node_id), self.async_update_callback, ) )
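async_update_callback above coalesces rapid gateway callbacks into a single delayed refresh: an _update_scheduled flag swallows repeat triggers while loop.call_later holds the one pending update. A self-contained asyncio sketch of that debounce pattern follows; the class name and delay are invented for illustration.

# Debounce sketch: many triggers within the delay window cause one update.
import asyncio

UPDATE_DELAY = 0.1  # seconds; the integration uses its own constant

class DebouncedUpdater:
    def __init__(self):
        self.updates_run = 0
        self._update_scheduled = False

    def trigger(self):
        if self._update_scheduled:
            return  # an update is already pending
        self._update_scheduled = True
        loop = asyncio.get_running_loop()
        loop.call_later(UPDATE_DELAY, lambda: asyncio.ensure_future(self._update()))

    async def _update(self):
        try:
            self.updates_run += 1
        finally:
            self._update_scheduled = False

async def main():
    updater = DebouncedUpdater()
    for _ in range(5):          # five rapid triggers...
        updater.trigger()
    await asyncio.sleep(0.2)
    print(updater.updates_run)  # ...coalesce into a single update: 1

asyncio.run(main())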
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/mysensors/device.py
"""ONVIF device abstraction.""" import asyncio import datetime as dt import os from typing import List from aiohttp.client_exceptions import ClientConnectionError, ServerDisconnectedError import onvif from onvif import ONVIFCamera from onvif.exceptions import ONVIFError from zeep.exceptions import Fault from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util from .const import ( ABSOLUTE_MOVE, CONTINUOUS_MOVE, GOTOPRESET_MOVE, LOGGER, PAN_FACTOR, RELATIVE_MOVE, TILT_FACTOR, ZOOM_FACTOR, ) from .event import EventManager from .models import PTZ, Capabilities, DeviceInfo, Profile, Resolution, Video class ONVIFDevice: """Manages an ONVIF device.""" def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry = None): """Initialize the device.""" self.hass: HomeAssistant = hass self.config_entry: ConfigEntry = config_entry self.available: bool = True self.device: ONVIFCamera = None self.events: EventManager = None self.info: DeviceInfo = DeviceInfo() self.capabilities: Capabilities = Capabilities() self.profiles: List[Profile] = [] self.max_resolution: int = 0 self._dt_diff_seconds: int = 0 @property def name(self) -> str: """Return the name of this device.""" return self.config_entry.data[CONF_NAME] @property def host(self) -> str: """Return the host of this device.""" return self.config_entry.data[CONF_HOST] @property def port(self) -> int: """Return the port of this device.""" return self.config_entry.data[CONF_PORT] @property def username(self) -> int: """Return the username of this device.""" return self.config_entry.data[CONF_USERNAME] @property def password(self) -> int: """Return the password of this device.""" return self.config_entry.data[CONF_PASSWORD] async def async_setup(self) -> bool: """Set up the device.""" self.device = get_device( self.hass, host=self.config_entry.data[CONF_HOST], port=self.config_entry.data[CONF_PORT], username=self.config_entry.data[CONF_USERNAME], password=self.config_entry.data[CONF_PASSWORD], ) # Get all device info try: await self.device.update_xaddrs() await self.async_check_date_and_time() self.info = await self.async_get_device_info() self.capabilities = await self.async_get_capabilities() self.profiles = await self.async_get_profiles() if self.capabilities.ptz: self.device.create_ptz_service() if self.capabilities.events: self.events = EventManager( self.hass, self.device, self.config_entry.unique_id ) # Determine max resolution from profiles self.max_resolution = max( profile.video.resolution.width for profile in self.profiles if profile.video.encoding == "H264" ) except ClientConnectionError as err: LOGGER.warning( "Couldn't connect to camera '%s', but will retry later. Error: %s", self.name, err, ) self.available = False except Fault as err: LOGGER.error( "Couldn't connect to camera '%s', please verify " "that the credentials are correct. 
Error: %s", self.name, err, ) return False return True async def async_stop(self, event=None): """Shut it all down.""" if self.events: await self.events.async_stop() await self.device.close() async def async_check_date_and_time(self) -> None: """Warns if device and system date not synced.""" LOGGER.debug("Setting up the ONVIF device management service") device_mgmt = self.device.create_devicemgmt_service() LOGGER.debug("Retrieving current device date/time") try: system_date = dt_util.utcnow() device_time = await device_mgmt.GetSystemDateAndTime() if not device_time: LOGGER.debug( """Couldn't get device '%s' date/time. GetSystemDateAndTime() return null/empty""", self.name, ) return if device_time.UTCDateTime: tzone = dt_util.UTC cdate = device_time.UTCDateTime else: tzone = ( dt_util.get_time_zone(device_time.TimeZone) or dt_util.DEFAULT_TIME_ZONE ) cdate = device_time.LocalDateTime if cdate is None: LOGGER.warning("Could not retrieve date/time on this camera") else: cam_date = dt.datetime( cdate.Date.Year, cdate.Date.Month, cdate.Date.Day, cdate.Time.Hour, cdate.Time.Minute, cdate.Time.Second, 0, tzone, ) cam_date_utc = cam_date.astimezone(dt_util.UTC) LOGGER.debug( "Device date/time: %s | System date/time: %s", cam_date_utc, system_date, ) dt_diff = cam_date - system_date self._dt_diff_seconds = dt_diff.total_seconds() if self._dt_diff_seconds > 5: LOGGER.warning( "The date/time on the device (UTC) is '%s', " "which is different from the system '%s', " "this could lead to authentication issues", cam_date_utc, system_date, ) except ServerDisconnectedError as err: LOGGER.warning( "Couldn't get device '%s' date/time. Error: %s", self.name, err ) async def async_get_device_info(self) -> DeviceInfo: """Obtain information about this device.""" device_mgmt = self.device.create_devicemgmt_service() device_info = await device_mgmt.GetDeviceInformation() # Grab the last MAC address for backwards compatibility mac = None try: network_interfaces = await device_mgmt.GetNetworkInterfaces() for interface in network_interfaces: if interface.Enabled: mac = interface.Info.HwAddress except Fault as fault: if "not implemented" not in fault.message: raise fault LOGGER.debug( "Couldn't get network interfaces from ONVIF deivice '%s'. 
Error: %s", self.name, fault, ) return DeviceInfo( device_info.Manufacturer, device_info.Model, device_info.FirmwareVersion, device_info.SerialNumber, mac, ) async def async_get_capabilities(self): """Obtain information about the available services on the device.""" snapshot = False try: media_service = self.device.create_media_service() media_capabilities = await media_service.GetServiceCapabilities() snapshot = media_capabilities and media_capabilities.SnapshotUri except (ONVIFError, Fault, ServerDisconnectedError): pass pullpoint = False try: event_service = self.device.create_events_service() event_capabilities = await event_service.GetServiceCapabilities() pullpoint = event_capabilities and event_capabilities.WSPullPointSupport except (ONVIFError, Fault): pass ptz = False try: self.device.get_definition("ptz") ptz = True except ONVIFError: pass return Capabilities(snapshot, pullpoint, ptz) async def async_get_profiles(self) -> List[Profile]: """Obtain media profiles for this device.""" media_service = self.device.create_media_service() result = await media_service.GetProfiles() profiles = [] for key, onvif_profile in enumerate(result): # Only add H264 profiles if ( not onvif_profile.VideoEncoderConfiguration or onvif_profile.VideoEncoderConfiguration.Encoding != "H264" ): continue profile = Profile( key, onvif_profile.token, onvif_profile.Name, Video( onvif_profile.VideoEncoderConfiguration.Encoding, Resolution( onvif_profile.VideoEncoderConfiguration.Resolution.Width, onvif_profile.VideoEncoderConfiguration.Resolution.Height, ), ), ) # Configure PTZ options if onvif_profile.PTZConfiguration: profile.ptz = PTZ( onvif_profile.PTZConfiguration.DefaultContinuousPanTiltVelocitySpace is not None, onvif_profile.PTZConfiguration.DefaultRelativePanTiltTranslationSpace is not None, onvif_profile.PTZConfiguration.DefaultAbsolutePantTiltPositionSpace is not None, ) try: ptz_service = self.device.create_ptz_service() presets = await ptz_service.GetPresets(profile.token) profile.ptz.presets = [preset.token for preset in presets if preset] except (Fault, ServerDisconnectedError): # It's OK if Presets aren't supported profile.ptz.presets = [] profiles.append(profile) return profiles async def async_get_snapshot_uri(self, profile: Profile) -> str: """Get the snapshot URI for a specified profile.""" if not self.capabilities.snapshot: return None media_service = self.device.create_media_service() req = media_service.create_type("GetSnapshotUri") req.ProfileToken = profile.token result = await media_service.GetSnapshotUri(req) return result.Uri async def async_get_stream_uri(self, profile: Profile) -> str: """Get the stream URI for a specified profile.""" media_service = self.device.create_media_service() req = media_service.create_type("GetStreamUri") req.ProfileToken = profile.token req.StreamSetup = { "Stream": "RTP-Unicast", "Transport": {"Protocol": "RTSP"}, } result = await media_service.GetStreamUri(req) return result.Uri async def async_perform_ptz( self, profile: Profile, distance, speed, move_mode, continuous_duration, preset, pan=None, tilt=None, zoom=None, ): """Perform a PTZ action on the camera.""" if not self.capabilities.ptz: LOGGER.warning("PTZ actions are not supported on device '%s'", self.name) return ptz_service = self.device.create_ptz_service() pan_val = distance * PAN_FACTOR.get(pan, 0) tilt_val = distance * TILT_FACTOR.get(tilt, 0) zoom_val = distance * ZOOM_FACTOR.get(zoom, 0) speed_val = speed preset_val = preset LOGGER.debug( "Calling %s PTZ | Pan = %4.2f | Tilt = %4.2f | 
Zoom = %4.2f | Speed = %4.2f | Preset = %s", move_mode, pan_val, tilt_val, zoom_val, speed_val, preset_val, ) try: req = ptz_service.create_type(move_mode) req.ProfileToken = profile.token if move_mode == CONTINUOUS_MOVE: # Guard against unsupported operation if not profile.ptz.continuous: LOGGER.warning( "ContinuousMove not supported on device '%s'", self.name ) return req.Velocity = { "PanTilt": {"x": pan_val, "y": tilt_val}, "Zoom": {"x": zoom_val}, } await ptz_service.ContinuousMove(req) await asyncio.sleep(continuous_duration) req = ptz_service.create_type("Stop") req.ProfileToken = profile.token await ptz_service.Stop({"ProfileToken": req.ProfileToken}) elif move_mode == RELATIVE_MOVE: # Guard against unsupported operation if not profile.ptz.relative: LOGGER.warning( "RelativeMove not supported on device '%s'", self.name ) return req.Translation = { "PanTilt": {"x": pan_val, "y": tilt_val}, "Zoom": {"x": zoom_val}, } req.Speed = { "PanTilt": {"x": speed_val, "y": speed_val}, "Zoom": {"x": speed_val}, } await ptz_service.RelativeMove(req) elif move_mode == ABSOLUTE_MOVE: # Guard against unsupported operation if not profile.ptz.absolute: LOGGER.warning( "AbsoluteMove not supported on device '%s'", self.name ) return req.Position = { "PanTilt": {"x": pan_val, "y": tilt_val}, "Zoom": {"x": zoom_val}, } req.Speed = { "PanTilt": {"x": speed_val, "y": speed_val}, "Zoom": {"x": speed_val}, } await ptz_service.AbsoluteMove(req) elif move_mode == GOTOPRESET_MOVE: # Guard against unsupported operation if preset_val not in profile.ptz.presets: LOGGER.warning( "PTZ preset '%s' does not exist on device '%s'. Available Presets: %s", preset_val, self.name, ", ".join(profile.ptz.presets), ) return req.PresetToken = preset_val req.Speed = { "PanTilt": {"x": speed_val, "y": speed_val}, "Zoom": {"x": speed_val}, } await ptz_service.GotoPreset(req) except ONVIFError as err: if "Bad Request" in err.reason: LOGGER.warning("Device '%s' doesn't support PTZ.", self.name) else: LOGGER.error("Error trying to perform PTZ action: %s", err) def get_device(hass, host, port, username, password) -> ONVIFCamera: """Get ONVIFCamera instance.""" return ONVIFCamera( host, port, username, password, f"{os.path.dirname(onvif.__file__)}/wsdl/", no_cache=True, )
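async_perform_ptz above scales a distance by a sign factor looked up per direction name (PAN_FACTOR, TILT_FACTOR, ZOOM_FACTOR), with unknown directions defaulting to zero movement. A minimal sketch of that lookup follows; the factor values here are assumptions in the spirit of the integration's constants (which live in its const module), not copies of them.

# Hypothetical factor tables mirroring the PAN/TILT/ZOOM lookup pattern:
# a direction name maps to a sign, unknown names fall back to 0.
PAN_FACTOR = {"RIGHT": 1, "LEFT": -1}
TILT_FACTOR = {"UP": 1, "DOWN": -1}
ZOOM_FACTOR = {"ZOOM_IN": 1, "ZOOM_OUT": -1}

def ptz_vector(distance, pan=None, tilt=None, zoom=None):
    return {
        "PanTilt": {
            "x": distance * PAN_FACTOR.get(pan, 0),
            "y": distance * TILT_FACTOR.get(tilt, 0),
        },
        "Zoom": {"x": distance * ZOOM_FACTOR.get(zoom, 0)},
    }

print(ptz_vector(0.5, pan="LEFT", zoom="ZOOM_IN"))
# {'PanTilt': {'x': -0.5, 'y': 0.0}, 'Zoom': {'x': 0.5}}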
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/onvif/device.py
"""Support for Bbox Bouygues Modem Router.""" from datetime import timedelta import logging import pybbox import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_MONITORED_VARIABLES, CONF_NAME, DATA_RATE_MEGABITS_PER_SECOND, DEVICE_CLASS_TIMESTAMP, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle from homeassistant.util.dt import utcnow _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by Bouygues Telecom" DEFAULT_NAME = "Bbox" MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) # Sensor types are defined like so: Name, unit, icon SENSOR_TYPES = { "down_max_bandwidth": [ "Maximum Download Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:download", ], "up_max_bandwidth": [ "Maximum Upload Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:upload", ], "current_down_bandwidth": [ "Currently Used Download Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:download", ], "current_up_bandwidth": [ "Currently Used Upload Bandwidth", DATA_RATE_MEGABITS_PER_SECOND, "mdi:upload", ], "uptime": ["Uptime", None, "mdi:clock"], "number_of_reboots": ["Number of reboot", None, "mdi:restart"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_MONITORED_VARIABLES): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Bbox sensor.""" # Create a data fetcher to support all of the configured sensors. Then make # the first call to init the data. try: bbox_data = BboxData() bbox_data.update() except requests.exceptions.HTTPError as error: _LOGGER.error(error) return False name = config[CONF_NAME] sensors = [] for variable in config[CONF_MONITORED_VARIABLES]: if variable == "uptime": sensors.append(BboxUptimeSensor(bbox_data, variable, name)) else: sensors.append(BboxSensor(bbox_data, variable, name)) add_entities(sensors, True) class BboxUptimeSensor(Entity): """Bbox uptime sensor.""" def __init__(self, bbox_data, sensor_type, name): """Initialize the sensor.""" self.client_name = name self.type = sensor_type self._name = SENSOR_TYPES[sensor_type][0] self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._icon = SENSOR_TYPES[sensor_type][2] self.bbox_data = bbox_data self._state = None @property def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_state_attributes(self): """Return the state attributes.""" return {ATTR_ATTRIBUTION: ATTRIBUTION} @property def device_class(self): """Return the class of this sensor.""" return DEVICE_CLASS_TIMESTAMP def update(self): """Get the latest data from Bbox and update the state.""" self.bbox_data.update() uptime = utcnow() - timedelta( seconds=self.bbox_data.router_infos["device"]["uptime"] ) self._state = uptime.replace(microsecond=0).isoformat() class BboxSensor(Entity): """Implementation of a Bbox sensor.""" def __init__(self, bbox_data, sensor_type, name): """Initialize the sensor.""" self.client_name = name self.type = sensor_type self._name = SENSOR_TYPES[sensor_type][0] self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._icon = SENSOR_TYPES[sensor_type][2] self.bbox_data = 
bbox_data self._state = None @property def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon @property def device_state_attributes(self): """Return the state attributes.""" return {ATTR_ATTRIBUTION: ATTRIBUTION} def update(self): """Get the latest data from Bbox and update the state.""" self.bbox_data.update() if self.type == "down_max_bandwidth": self._state = round(self.bbox_data.data["rx"]["maxBandwidth"] / 1000, 2) elif self.type == "up_max_bandwidth": self._state = round(self.bbox_data.data["tx"]["maxBandwidth"] / 1000, 2) elif self.type == "current_down_bandwidth": self._state = round(self.bbox_data.data["rx"]["bandwidth"] / 1000, 2) elif self.type == "current_up_bandwidth": self._state = round(self.bbox_data.data["tx"]["bandwidth"] / 1000, 2) elif self.type == "number_of_reboots": self._state = self.bbox_data.router_infos["device"]["numberofboots"] class BboxData: """Get data from the Bbox.""" def __init__(self): """Initialize the data object.""" self.data = None self.router_infos = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from the Bbox.""" try: box = pybbox.Bbox() self.data = box.get_ip_stats() self.router_infos = box.get_bbox_info() except requests.exceptions.HTTPError as error: _LOGGER.error(error) self.data = None self.router_infos = None return False
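BboxUptimeSensor above turns the router's uptime-in-seconds counter into a boot timestamp suitable for a DEVICE_CLASS_TIMESTAMP state. The same arithmetic as plain datetime code, with an invented uptime value standing in for router_infos["device"]["uptime"]:

# Uptime counter -> ISO 8601 boot timestamp, microseconds dropped.
from datetime import datetime, timedelta, timezone

uptime_seconds = 90061  # hypothetical value read from the router
booted_at = datetime.now(timezone.utc) - timedelta(seconds=uptime_seconds)

print(booted_at.replace(microsecond=0).isoformat())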
"""Tests for the SolarEdge config flow.""" import pytest from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import data_entry_flow from homeassistant.components.solaredge import config_flow from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME from homeassistant.const import CONF_API_KEY, CONF_NAME from tests.async_mock import Mock, patch from tests.common import MockConfigEntry NAME = "solaredge site 1 2 3" SITE_ID = "1a2b3c4d5e6f7g8h" API_KEY = "a1b2c3d4e5f6g7h8" @pytest.fixture(name="test_api") def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() api.get_details.return_value = {"details": {"status": "active"}} with patch("solaredge.Solaredge", return_value=api): yield api def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarEdgeConfigFlow() flow.hass = hass return flow async def test_user(hass, test_api): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_import(hass, test_api): """Test import step.""" flow = init_config_flow(hass) # import with site_id and api_key result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY # import with all result = await flow.async_step_import( {CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solaredge_site_1_2_3" assert result["data"][CONF_SITE_ID] == SITE_ID assert result["data"][CONF_API_KEY] == API_KEY async def test_abort_if_already_setup(hass, test_api): """Test we abort if the site_id is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solaredge", data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ).add_to_hass(hass) # import: Should fail, same SITE_ID result = await flow.async_step_import( {CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "site_exists" # user: Should fail, same SITE_ID result = await flow.async_step_user( {CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_exists"} async def test_asserts(hass, test_api): """Test the _site_in_configuration_exists method.""" flow = init_config_flow(hass) # test with inactive site test_api.get_details.return_value = {"details": {"status": "NOK"}} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "site_not_active"} # test with api_failure test_api.get_details.return_value = {} result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, 
CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "api_failure"} # test with ConnectionTimeout test_api.get_details.side_effect = ConnectTimeout() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError test_api.get_details.side_effect = HTTPError() result = await flow.async_step_user( {CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
robbiet480/home-assistant
tests/components/solaredge/test_config_flow.py
homeassistant/components/bbox/sensor.py
#!/usr/bin/python3
import math
import hashlib

import numpy as np

from typing import Any, Optional

from ..exceptions import StrandError, ChromosomeError, MissingLocusError
from .subloci import SubLoci
from .attrs import LocusAttrs

__all__ = ["Locus"]


class Locus:
    def __init__(
        self,
        chromosome: str,
        start: int,
        end: int,
        source: str = "locuspocus",
        feature_type: str = "locus",
        strand: str = "+",
        frame: int = None,
        name: str = None,
        # Extra locus stuff
        attrs: LocusAttrs = None,
        subloci: SubLoci = None,
    ):
        self.chromosome = str(chromosome)
        self.start = int(start)
        self.end = int(end)
        self.source = str(source)
        self.feature_type = str(feature_type)
        self.strand = str(strand)
        self.frame = frame
        self.name = name

        self.attrs = LocusAttrs(attrs)
        self.subloci = SubLoci(subloci)

    def __eq__(self, other):
        # Equality is defined by the hash, which is computed from the *core*
        # fields only (chromosome, coordinates, feature type, strand, frame
        # and subloci); see __hash__ below. Attrs, source, and name do not
        # participate in equality.
        return hash(self) == hash(other)

    def __hash__(self):
        """
        Convert the locus to a hash, uses md5.

        The hash is computed using the *core* properties of the
        Locus, i.e. changing any attrs will not change the hash value.

        Parameters
        ----------
        None

        Returns
        -------
        int : md5 hash of locus
        """
        field_list = [
            str(x)
            for x in (
                self.chromosome,
                self.start,
                self.end,
                self.feature_type,
                self.strand,
                self.frame,
            )
        ]
        subloci_list = [str(hash(x)) for x in self.subloci]
        # Create a full string
        loc_string = "_".join(field_list + subloci_list)
        digest = hashlib.md5(str.encode(loc_string)).hexdigest()
        return int(digest, base=16)

    def __len__(self):
        return abs(self.end - self.start) + 1

    def __lt__(self, locus):
        if self.chromosome == locus.chromosome:
            return self.start < locus.start
        else:
            return self.chromosome < locus.chromosome

    def __le__(self, locus):
        if (self.chromosome, self.coor) == (locus.chromosome, locus.coor):
            return True
        else:
            return self < locus

    def __ge__(self, locus):
        if (self.chromosome, self.coor) == (locus.chromosome, locus.coor):
            return True
        else:
            return self > locus

    def __gt__(self, locus):
        if self.chromosome == locus.chromosome:
            return self.start > locus.start
        else:
            return self.chromosome > locus.chromosome

    def __repr__(self):
        return (
            f"Locus("
            f"{self.chromosome},{self.start},{self.end},source={self.source},"
            f"feature_type='{self.feature_type}',"
            f"strand='{self.strand}',"
            f"frame='{self.frame}',"
            f"name='{self.name}',"
            f"attrs={self.attrs},"
            f"subloci=[{len(self.subloci)} subloci]"
            f")"
        )

    @property
    def stranded_start(self):
        if self.strand == "+":
            return min(self.coor)
        elif self.strand == "-":
            return max(self.coor)
        else:
            raise StrandError

    @property
    def stranded_end(self):
        if self.strand == "+":
            return max(self.coor)
        elif self.strand == "-":
            return min(self.coor)
        else:
            raise StrandError

    def __getitem__(self, item):
        return self.attrs[item]

    def __setitem__(self, key, val):
        self.attrs[key] = val

    def add_sublocus(
        self, locus: "Locus", find_parent=False, parent_attr: Optional[str] = "Parent"
    ):
        """
        Adds a sublocus to the current Locus. The added locus
        will be added according to its `parent_attr` keyword.
""" if not find_parent: self.subloci.add(locus) else: try: if locus[parent_attr] == self.name: self.subloci.add(locus) else: # Find the parent of the sublocus parent = self.subloci.find(locus[parent_attr]) if parent is None: raise MissingLocusError parent.subloci.add(locus) except KeyError: raise KeyError( f"Unable to resolve the key:{parent_attr} to find parent Locus" ) def as_record(self): return ( ( self.chromosome, self.start, self.end, self.source, self.feature_type, self.strand, self.frame, self.name, hash(self), ), self.attrs, ) def default_getitem(self, key, default=None) -> Any: """ Returns the attr value of the Locus based on the key. similar to: Locus['Name'] -> "Locus1". If the attr key (e.g. 'Name') does not exist, a default can be specified. Parameters ---------- key : str The key value of the attr default : Any If the key is not in the Locus attributes, return this value by default. """ try: val = self.attrs[key] except KeyError: val = default finally: return val @property def coor(self): """ Returns a tuple containing the start and end positions of the locus """ return (self.start, self.end) def upstream(self, distance: int) -> int: """ Calculates a base pair position 5' of the locus. Parameters ---------- distance : int The distance upstream of the locus """ if self.strand == "+": return max(0, self.start - distance) elif self.strand == "-": return self.end + distance def downstream(self, distance: int) -> int: """ Calculates a base pair position 3' of the locus Parameters ---------- distance : int The distance downstream of the locus """ if self.strand == "+": return self.end + distance elif self.strand == "-": return max(0, self.start) - distance # return self.end + distance @property def center(self): """ Calculates the center base pair position of the locus. NOTE: If the locus has an odd length, a 'half-bp' will be returned. E.g: Locus('1',100,200).center == Returns ------- The center position """ return self.start + len(self) / 2 def distance(self, locus): """ Return the number of base pairs between two loci. NOTE: this excludes the start/end bases of the loci. Locus A Locus B ==========---------========= 1 10 20 30 There are 9 bases between 10 and 20 (excluding positions 10 and 20 themselves because they are included in the loci). If the loci are on different chromosomes, return np.inf Parameters ---------- locus: Locus Object A second locus object to calculate distance. Returns ------- int: the number of bp between the loci np.inf: if on different chromosomes """ if self.chromosome != locus.chromosome: distance = np.inf else: x, y = sorted([self, locus]) distance = y.start - x.end - 1 return distance def center_distance(self, locus): """ Return the distance between the center of two loci. If the loci are on different chromosomes, return np.inf. Parameters ---------- locus : Locus Object A second locus object used to calculate a distance. Returns ------- int : the distance between the center of two loci. np.inf: if on different chromosomes """ if self.chromosome != locus.chromosome: distance = np.inf else: distance = math.floor(abs(self.center - locus.center)) return distance def combine(self, locus): """ Returns a new Locus with start and stop boundaries that contain both of the input loci. Both input loci are added to the subloci of the new Locus. NOTE: this ignores strand, the resultant Locus is just a container for the input loci. 
        ___________Ascii Example__________________________
              Locus A          Locus B
        ------=========-------=============---------------

        A.combine(B)
        ------=============================---------------
        subloci=[A,B]
        """
        if self.chromosome != locus.chromosome:
            raise ChromosomeError("Input Chromosomes do not match")
        x, y = sorted([self, locus])
        start = x.start
        end = y.end
        return Locus(self.chromosome, start, end, subloci=[self, locus])

    def as_tree(self, parent=None):  # pragma: no cover
        from anytree import Node, RenderTree

        root = Node(f"{self.feature_type}:{self.name}", parent=parent)
        for c in self.subloci:
            node = c.as_tree(parent=root)
        if parent is None:
            for pre, _, node in RenderTree(root):
                print("%s%s" % (pre, node.name))
        return root

    def __str__(self):
        return repr(self)

    # --------------------------------
    # Factory Methods
    # --------------------------------

    @classmethod
    def from_gff_line(
        cls,
        line,
        /,
        ID_attr: str = "ID",
        parent_attr: str = "Parent",
        attr_split: str = "=",
    ) -> "Locus":
        (
            chromosome,
            source,
            feature,
            start,
            end,
            score,
            strand,
            frame,
            attributes,
        ) = line.strip().split("\t", maxsplit=8)
        # Cast data into appropriate types
        strand = None if strand == "." else strand
        frame = None if frame == "." else int(frame)
        # Get the attributes
        attributes = dict(
            [
                (field.strip().split(attr_split))
                for field in attributes.strip(";").split(";")
            ]
        )
        # Store the score in the attrs if it exists
        if score != ".":
            attributes["score"] = float(score)
        # Parse out the Identifier
        if ID_attr in attributes:
            name = attributes[ID_attr]
        else:
            name = None
        locus = cls(
            chromosome,
            start,
            end,
            source=source,
            feature_type=feature,
            strand=strand,
            frame=frame,
            name=name,
            attrs=attributes,
        )
        return locus
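A short usage sketch (illustrative, not part of the module above) exercising the core Locus API: ordering, distance(), combine(), and the from_gff_line() factory. The GFF line is a made-up example.

from locuspocus import Locus

a = Locus("1", 1, 100, strand="+", name="A")
b = Locus("1", 150, 250, name="B")

assert a < b                        # ordered by chromosome, then start
assert a.distance(b) == 49          # bases strictly between the two loci
merged = a.combine(b)               # strand-agnostic container Locus
assert merged.coor == (1, 250) and a in merged.subloci

# Parsing a (hypothetical) tab-separated GFF line
gff_line = "1\tlocuspocus\tgene\t10\t20\t.\t+\t.\tID=gene1;Name=foo"
gene = Locus.from_gff_line(gff_line)
assert gene.name == "gene1" and gene["Name"] == "foo"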
import pytest import numpy as np import minus80 as m80 from locuspocus import Locus, Loci from locuspocus.exceptions import StrandError, ChromosomeError @pytest.fixture(scope="module") def SimpleLoci(): a = Locus("1", 10, 20) b = Locus("1", 20, 30) c = Locus("2", 30, 40) d = Locus("2", 40, 50) x = Locus("1", 100, 200, attrs={"foo": "bar"}, name="x", subloci=[a, b]) y = Locus("2", 100, 200, attrs={"foo": "bar"}, name="y", subloci=[c, d]) if m80.exists("Loci", "test"): m80.delete("Loci", "test") ref = Loci("test") ref.add_locus(x) ref.add_locus(y) return ref @pytest.fixture def simpleLocusView(SimpleLoci): return SimpleLoci["x"] def test_initialization(simpleLocusView): # numeric chromosomes assert simpleLocusView.chromosome == "1" assert simpleLocusView.start == 100 assert simpleLocusView.end == 200 assert len(simpleLocusView) == 101 def test_getitem(simpleLocusView): assert simpleLocusView["foo"] == "bar" def test_default_getitem(simpleLocusView): assert simpleLocusView.default_getitem("name", "default") == "default" def test_start(simpleLocusView): assert simpleLocusView.start == 100 def test_plus_stranded_start(): l = Locus("1", 1, 100, strand="+") assert l.stranded_start == 1 def test_minus_stranded_start(): l = Locus("1", 1, 100, strand="-") assert l.stranded_start == 100 def test_end(simpleLocusView): assert simpleLocusView.end == 200 def test_plus_stranded_end(): l = Locus("1", 1, 100, strand="+") assert l.stranded_end == 100 def test_minus_stranded_end(): l = Locus("1", 1, 100, strand="-") assert l.stranded_end == 1 def test_coor(simpleLocusView): assert simpleLocusView.coor == (100, 200) def test_upstream(simpleLocusView): assert simpleLocusView.upstream(50) == 50 def test_upstream_minus_strand(simpleLocusView): l = Locus("1", 1, 100, strand="-") assert l.upstream(50) == 150 def test_downstream(simpleLocusView): assert simpleLocusView.downstream(50) == 250 def test_downstream_minus_strand(simpleLocusView): l = Locus("1", 100, 200, strand="-") assert l.downstream(50) == 50 def test_center(): l = Locus("1", 100, 200, strand="-") assert l.center == 150.5 def test_name(simpleLocusView): assert simpleLocusView.name == "x" def test_eq(simpleLocusView): another_Locus = Locus(1, 110, 220) assert simpleLocusView == simpleLocusView assert simpleLocusView != another_Locus def test_loci_lt_by_chrom(): x = Locus("1", 1, 1) y = Locus("2", 1, 1) assert x < y def test_loci_gt_by_chrom(): x = Locus("1", 1, 1) y = Locus("2", 1, 1) assert y > x def test_loci_lt_by_pos(): x = Locus("1", 1, 100) y = Locus("1", 2, 100) assert x < y def test_loci_gt_by_pos(): x = Locus("1", 1, 100) y = Locus("1", 2, 200) assert y > x def test_len(simpleLocusView): assert len(simpleLocusView) == 101 assert len(Locus(1, 100, 100)) == 1 def test_lt(simpleLocusView): same_chrom_Locus = Locus("1", 110, 220) diff_chrom_Locus = Locus("2", 90, 150) assert simpleLocusView < same_chrom_Locus assert simpleLocusView < diff_chrom_Locus def test_gt(simpleLocusView): same_chrom_Locus = Locus("1", 90, 150) diff_chrom_Locus = Locus("2", 90, 150) assert simpleLocusView > same_chrom_Locus assert diff_chrom_Locus > simpleLocusView def test_repr(simpleLocusView): assert repr(simpleLocusView) def test_subloci_getitem(): x = Locus("1", 1, 2) y = Locus("1", 3, 4, name="sublocus") x.add_sublocus(y) assert x.subloci[0].name == "sublocus" def test_subloci_iter(): x = Locus("1", 1, 2) y = Locus("1", 3, 4, name="sublocus1") z = Locus("1", 3, 4, name="sublocus2") x.add_sublocus(y) x.add_sublocus(z) for sub in x.subloci: assert sub.chromosome == "1" 
def test_subloci_len(): x = Locus("1", 1, 2) y = Locus("1", 3, 4, name="sublocus1") z = Locus("1", 3, 4, name="sublocus2") x.add_sublocus(y) x.add_sublocus(z) assert len(x.subloci) == 2 def test_attrs_keys_method(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert sorted(x.attrs.keys()) == ["bar", "foo"] def test_attrs_keys_method_empty(): x = Locus("1", 3, 4, attrs={}) assert len(list(x.attrs.keys())) == 0 def test_attrs_vals_method(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert len(sorted(x.attrs.values())) == 2 def test_attrs_vals_method_empty(): x = Locus("1", 3, 4, attrs={}) assert len(list(x.attrs.values())) == 0 def test_attrs_items(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert len(sorted(x.attrs.items())) == 2 def test_attrs_contains(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert "foo" in x.attrs def test_attrs_repr(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert repr(x.attrs) def test_le_equals(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 3, 4) assert x <= y def test_le_less(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 30, 40) assert x <= y def test_ge_equals(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 3, 4) assert x >= y def test_ge_greater(): x = Locus("1", 30, 40, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 3, 4) assert x >= y def test_stranded_start_invalid(): # Strand cannot be '=' x = Locus("1", 3, 4, strand="=") with pytest.raises(StrandError): x.stranded_start def test_stranded_stop_invalid(): # Strand cannot be '=' x = Locus("1", 3, 4, strand="=") with pytest.raises(StrandError): x.stranded_end def test_as_record(): x = Locus("1", 3, 4, strand="+") # This doesn't compare the dictionaries of each ... assert x.as_record()[0] == ( "1", 3, 4, "locuspocus", "locus", "+", None, None, 2039807104618252476, ) def test_center_distance(): x = Locus("1", 1, 100, strand="+") # This needs to be 201 since x starts at 1 y = Locus("1", 201, 300, strand="=") assert x.center_distance(y) == 200 def test_center_distance_different_chroms(): x = Locus("1", 1, 100, strand="+") # This needs to be 201 since x starts at 1 y = Locus("2", 201, 300, strand="+") assert x.center_distance(y) == np.inf def test_str(): x = Locus("1", 1, 100, strand="+") assert str(x) == repr(x) def test_combine(): x = Locus("1", 1, 2) y = Locus("1", 3, 4) z = x.combine(y) assert z.start == 1 assert z.end == 4 assert x in z.subloci assert y in z.subloci def test_combine_chromosome_mismatch(): x = Locus("1", 1, 2) y = Locus("2", 3, 4) with pytest.raises(ChromosomeError): x.combine(y) def test_distance(): x = Locus("1", 1, 100) y = Locus("1", 150, 250) assert x.distance(y) == 49 def test_distance_diff_chroms(): x = Locus("1", 1, 100) y = Locus("2", 150, 250) assert x.distance(y) == np.inf def test_get_subloci_by_index(SimpleLoci): x = SimpleLoci["x"] assert x.subloci[0]
LinkageIO/LocusPocus
tests/test_LocusView.py
locuspocus/locus/__init__.py
import logging

import re
import reprlib
import pprint

import numpy as np

from minus80 import Freezable
from minus80.RawFile import RawFile

from collections import defaultdict
from functools import lru_cache

from .chromosome import Chromosome


class Fasta(Freezable):
    """
    A pythonic interface to a FASTA file. This interface
    allows convenient slicing into contigs (chromosomes).

    >>> from locuspocus import Fasta
    >>> x = Fasta.from_file('example', 'example.fa')

    """

    log = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    formatter = logging.Formatter("%(asctime)s %(name)-12s %(levelname)-8s %(message)s")
    handler.setFormatter(formatter)
    if not len(log.handlers):
        log.addHandler(handler)
    log.setLevel(logging.INFO)

    def __init__(self, name, rootdir=None):
        """
        Load a Fasta object from the Minus80.

        Parameters
        ----------
        name : str
            The name of the frozen object

        Returns
        -------
        A Fasta object
        """
        super().__init__(name, rootdir=rootdir)
        # Load up from the database
        self._initialize_tables()

    def _initialize_tables(self):
        """
        Initialize the tables for the FASTA class

        NOTE: internal method
        """
        cur = self.m80.db.cursor()
        cur.execute(
            """
            CREATE TABLE IF NOT EXISTS added_order (
                aorder INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT
            );
            """
        )
        cur.execute(
            """
            CREATE TABLE IF NOT EXISTS nicknames (
                nickname TEXT,
                chrom TEXT,
                PRIMARY KEY(nickname,chrom),
                FOREIGN KEY(chrom) REFERENCES chroms(chrom)
            )
            """
        )
        cur.execute(
            """
            CREATE TABLE IF NOT EXISTS attributes (
                chrom TEXT,
                attribute TEXT,
                PRIMARY KEY(chrom,attribute),
                FOREIGN KEY(chrom) REFERENCES chroms(chrom)
            )
            """
        )

    def add_chrom(self, chrom, replace=False, cur=None):
        """
        Add a chromosome to the Fasta object.

        Parameters
        ----------
        chrom : Chromosome object
            The chromosome object to add. See LocusPocus.Chromosome
        replace : bool (default: False)
            By default a chromosome can only be added once. If this
            is set, the chromosome object will be replaced.
        """
        self.log.info(f"Adding {chrom.name}")
        # Check for duplicates
        if chrom.name in self:
            if not replace:
                raise ValueError(f"{chrom.name} already in FASTA")
        else:
            if cur is None:
                cur = self.m80.db.cursor()
            cur.execute(
                """
                INSERT OR REPLACE INTO added_order
                    (name)
                VALUES (?)
                """,
                (chrom.name,),
            )
            for x in chrom._attrs:
                self._add_attribute(chrom.name, x)
        seqarray = chrom.seq
        self.m80.col[chrom.name] = seqarray
        self.cache_clear()

    def del_chrom(self, chrom):
        """
        Delete a chromosome from the database
        """
        if isinstance(chrom, Chromosome):
            name = chrom.name
        elif isinstance(chrom, str):
            name = chrom
        else:
            raise ValueError("input must be a Chromosome object or a string")
        if name not in self:
            raise ValueError(f"'{name}' not in the {self.m80.dtype}('{self.m80.name}')")
        # sqlite3 runs a single statement per execute() call, so each DELETE
        # needs its own call (passing all three in one string raises an error)
        cur = self.m80.db.cursor()
        cur.execute("DELETE FROM added_order WHERE name = ?", (name,))
        cur.execute("DELETE FROM nicknames WHERE chrom = ?", (name,))
        cur.execute("DELETE FROM attributes WHERE chrom = ?", (name,))
        self.m80.col.remove(name)

    def chrom_names(self):
        """
        Returns an iterable of chromosome names

        Parameters
        ----------
        None

        Returns
        -------
        An iterable of chromosome names in added order
        """
        return (
            x
            for (x,) in self.m80.db.cursor().execute(
                """
                SELECT name FROM added_order
                ORDER BY aorder
                """
            )
        )

    def cache_clear(self):
        self.__getitem__.cache_clear()

    @classmethod
    def from_file(cls, name, fasta_file, replace=False, rootdir=None):
        """
        Create a Fasta object from a file.
""" self = cls(name, rootdir=rootdir) with RawFile(fasta_file) as IN, self.m80.db.bulk_transaction() as cur: cur_chrom = None seqs = [] name, attrs = None, None for line in IN: line = line.strip() if line.startswith(">"): # Finish the last chromosome before adding a new one if len(seqs) > 0: cur_chrom = Chromosome(name, seqs, *attrs) self.add_chrom(cur_chrom, cur=cur, replace=replace) seqs = [] name, *attrs = line.lstrip(">").split() else: seqs += line # cur_chrom.seq = np.append(cur_chrom.seq,list(line)) # Add the last chromosome cur_chrom = Chromosome(name, seqs, *attrs) self.add_chrom(cur_chrom, cur=cur, replace=replace) return self def __iter__(self): """ Iterate over chromosome objects """ chroms = self.m80.db.cursor().execute( "SELECT name FROM added_order ORDER BY aorder" ) for (chrom,) in chroms: yield self[chrom] def __len__(self): """ Returns the number of chroms in the Fasta """ return ( self.m80.db.cursor() .execute( """ SELECT COUNT(*) FROM added_order """ ) .fetchone()[0] ) def __contains__(self, obj): """ Returns boolean indicating if a named contig (chromosome) is in the fasta. """ if isinstance(obj, Chromosome): obj = obj.name cur = self.m80.db.cursor() # Check if in chrom names in_added = cur.execute( """ SELECT COUNT(*) FROM added_order WHERE name = ? """, (obj,), ).fetchone()[0] if in_added == 1: return True # Check if in aliases in_alias = cur.execute( """ SELECT COUNT(*) FROM nicknames WHERE nickname = ? """, (obj,), ).fetchone()[0] if in_alias == 1: return True # Otherise its not here return False @lru_cache(maxsize=128) def __getitem__(self, chrom_name): if chrom_name not in self: raise ValueError(f"{chrom_name} not in {self.m80.name}") try: seq_array = self.m80.col[chrom_name] except Exception as e: chrom_name = self._get_nickname(chrom_name) seq_array = self.m80.col[chrom_name] finally: attrs = [ x[0] for x in self.m80.db.cursor().execute( """ SELECT attribute FROM attributes WHERE chrom = ? ORDER BY rowid -- This preserves the ordering of attrs """, (chrom_name,), ) ] return Chromosome(chrom_name, seq_array, *attrs) def to_fasta(self, filename, line_length=70): """ Print the chromosomes to a file in FASTA format Paramaters ---------- filename : str The output filename line_length : int (default: 70) The number of nucleotides per line Returns ------- None """ with open(filename, "w") as OUT: for chrom_name in self.chrom_names(): print(f"Printing out {chrom_name}") chrom = self[chrom_name] # easy_id = ids[chrom_name] start_length = len(chrom) # if easy_id == 'chrUn': # easy_id = easy_id + '_' + chrom_name print(f'>{chrom_name} {"|".join(chrom._attrs)}', file=OUT) printed_length = 0 for i in range(1, len(chrom), 70): sequence = chrom[i : (i - 1) + 70] print("".join(sequence), file=OUT) printed_length += len(sequence) if printed_length != start_length: # pragma: no cover raise ValueError("Chromosome was truncated during printing") return None def _add_attribute(self, chrom_name, attr, cur=None): """ Add an attribute the the Fasta object. Attributes describe chromosomes and often follow the '>' token in the FASTA file. Parameters ---------- chrom_name : str The name of the chromosome you are adding an attribute to attr : str the attribute you are adding """ if cur is None: cur = self.m80.db.cursor() cur.execute( """ INSERT INTO attributes (chrom,attribute) VALUES (?,?) 
""", (chrom_name, attr), ) self.cache_clear() def _add_nickname(self, chrom, nickname, cur=None): """ Add a nickname for a chromosome Parameters ---------- chrom : str The chromosome you want to nickname nickname : str The alternative name for the chromosome """ if cur is None: cur = self.m80.db.cursor() cur.execute( """ INSERT OR REPLACE INTO nicknames (nickname,chrom) VALUES (?,?) """, (nickname, chrom), ) def _get_nickname(self, nickname): """ Get a chromosomem name by nickname """ return ( self.m80.db.cursor() .execute( """ SELECT chrom FROM nicknames WHERE nickname = ? """, (nickname,), ) .fetchone()[0] ) def __repr__(self): # pragma: nocover return pprint.saferepr(reprlib.repr(list(self)))
import pytest import numpy as np import minus80 as m80 from locuspocus import Locus, Loci from locuspocus.exceptions import StrandError, ChromosomeError @pytest.fixture(scope="module") def SimpleLoci(): a = Locus("1", 10, 20) b = Locus("1", 20, 30) c = Locus("2", 30, 40) d = Locus("2", 40, 50) x = Locus("1", 100, 200, attrs={"foo": "bar"}, name="x", subloci=[a, b]) y = Locus("2", 100, 200, attrs={"foo": "bar"}, name="y", subloci=[c, d]) if m80.exists("Loci", "test"): m80.delete("Loci", "test") ref = Loci("test") ref.add_locus(x) ref.add_locus(y) return ref @pytest.fixture def simpleLocusView(SimpleLoci): return SimpleLoci["x"] def test_initialization(simpleLocusView): # numeric chromosomes assert simpleLocusView.chromosome == "1" assert simpleLocusView.start == 100 assert simpleLocusView.end == 200 assert len(simpleLocusView) == 101 def test_getitem(simpleLocusView): assert simpleLocusView["foo"] == "bar" def test_default_getitem(simpleLocusView): assert simpleLocusView.default_getitem("name", "default") == "default" def test_start(simpleLocusView): assert simpleLocusView.start == 100 def test_plus_stranded_start(): l = Locus("1", 1, 100, strand="+") assert l.stranded_start == 1 def test_minus_stranded_start(): l = Locus("1", 1, 100, strand="-") assert l.stranded_start == 100 def test_end(simpleLocusView): assert simpleLocusView.end == 200 def test_plus_stranded_end(): l = Locus("1", 1, 100, strand="+") assert l.stranded_end == 100 def test_minus_stranded_end(): l = Locus("1", 1, 100, strand="-") assert l.stranded_end == 1 def test_coor(simpleLocusView): assert simpleLocusView.coor == (100, 200) def test_upstream(simpleLocusView): assert simpleLocusView.upstream(50) == 50 def test_upstream_minus_strand(simpleLocusView): l = Locus("1", 1, 100, strand="-") assert l.upstream(50) == 150 def test_downstream(simpleLocusView): assert simpleLocusView.downstream(50) == 250 def test_downstream_minus_strand(simpleLocusView): l = Locus("1", 100, 200, strand="-") assert l.downstream(50) == 50 def test_center(): l = Locus("1", 100, 200, strand="-") assert l.center == 150.5 def test_name(simpleLocusView): assert simpleLocusView.name == "x" def test_eq(simpleLocusView): another_Locus = Locus(1, 110, 220) assert simpleLocusView == simpleLocusView assert simpleLocusView != another_Locus def test_loci_lt_by_chrom(): x = Locus("1", 1, 1) y = Locus("2", 1, 1) assert x < y def test_loci_gt_by_chrom(): x = Locus("1", 1, 1) y = Locus("2", 1, 1) assert y > x def test_loci_lt_by_pos(): x = Locus("1", 1, 100) y = Locus("1", 2, 100) assert x < y def test_loci_gt_by_pos(): x = Locus("1", 1, 100) y = Locus("1", 2, 200) assert y > x def test_len(simpleLocusView): assert len(simpleLocusView) == 101 assert len(Locus(1, 100, 100)) == 1 def test_lt(simpleLocusView): same_chrom_Locus = Locus("1", 110, 220) diff_chrom_Locus = Locus("2", 90, 150) assert simpleLocusView < same_chrom_Locus assert simpleLocusView < diff_chrom_Locus def test_gt(simpleLocusView): same_chrom_Locus = Locus("1", 90, 150) diff_chrom_Locus = Locus("2", 90, 150) assert simpleLocusView > same_chrom_Locus assert diff_chrom_Locus > simpleLocusView def test_repr(simpleLocusView): assert repr(simpleLocusView) def test_subloci_getitem(): x = Locus("1", 1, 2) y = Locus("1", 3, 4, name="sublocus") x.add_sublocus(y) assert x.subloci[0].name == "sublocus" def test_subloci_iter(): x = Locus("1", 1, 2) y = Locus("1", 3, 4, name="sublocus1") z = Locus("1", 3, 4, name="sublocus2") x.add_sublocus(y) x.add_sublocus(z) for sub in x.subloci: assert sub.chromosome == "1" 
def test_subloci_len(): x = Locus("1", 1, 2) y = Locus("1", 3, 4, name="sublocus1") z = Locus("1", 3, 4, name="sublocus2") x.add_sublocus(y) x.add_sublocus(z) assert len(x.subloci) == 2 def test_attrs_keys_method(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert sorted(x.attrs.keys()) == ["bar", "foo"] def test_attrs_keys_method_empty(): x = Locus("1", 3, 4, attrs={}) assert len(list(x.attrs.keys())) == 0 def test_attrs_vals_method(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert len(sorted(x.attrs.values())) == 2 def test_attrs_vals_method_empty(): x = Locus("1", 3, 4, attrs={}) assert len(list(x.attrs.values())) == 0 def test_attrs_items(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert len(sorted(x.attrs.items())) == 2 def test_attrs_contains(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert "foo" in x.attrs def test_attrs_repr(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) assert repr(x.attrs) def test_le_equals(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 3, 4) assert x <= y def test_le_less(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 30, 40) assert x <= y def test_ge_equals(): x = Locus("1", 3, 4, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 3, 4) assert x >= y def test_ge_greater(): x = Locus("1", 30, 40, attrs={"foo": "locus1", "bar": "baz"}) y = Locus("1", 3, 4) assert x >= y def test_stranded_start_invalid(): # Strand cannot be '=' x = Locus("1", 3, 4, strand="=") with pytest.raises(StrandError): x.stranded_start def test_stranded_stop_invalid(): # Strand cannot be '=' x = Locus("1", 3, 4, strand="=") with pytest.raises(StrandError): x.stranded_end def test_as_record(): x = Locus("1", 3, 4, strand="+") # This doesn't compare the dictionaries of each ... assert x.as_record()[0] == ( "1", 3, 4, "locuspocus", "locus", "+", None, None, 2039807104618252476, ) def test_center_distance(): x = Locus("1", 1, 100, strand="+") # This needs to be 201 since x starts at 1 y = Locus("1", 201, 300, strand="=") assert x.center_distance(y) == 200 def test_center_distance_different_chroms(): x = Locus("1", 1, 100, strand="+") # This needs to be 201 since x starts at 1 y = Locus("2", 201, 300, strand="+") assert x.center_distance(y) == np.inf def test_str(): x = Locus("1", 1, 100, strand="+") assert str(x) == repr(x) def test_combine(): x = Locus("1", 1, 2) y = Locus("1", 3, 4) z = x.combine(y) assert z.start == 1 assert z.end == 4 assert x in z.subloci assert y in z.subloci def test_combine_chromosome_mismatch(): x = Locus("1", 1, 2) y = Locus("2", 3, 4) with pytest.raises(ChromosomeError): x.combine(y) def test_distance(): x = Locus("1", 1, 100) y = Locus("1", 150, 250) assert x.distance(y) == 49 def test_distance_diff_chroms(): x = Locus("1", 1, 100) y = Locus("2", 150, 250) assert x.distance(y) == np.inf def test_get_subloci_by_index(SimpleLoci): x = SimpleLoci["x"] assert x.subloci[0]
LinkageIO/LocusPocus
tests/test_LocusView.py
locuspocus/fasta.py
from typing import List, cast import numpy as np from pandas._typing import FilePathOrBuffer, Scalar, StorageOptions from pandas.compat._optional import import_optional_dependency import pandas as pd from pandas.io.excel._base import BaseExcelReader class ODFReader(BaseExcelReader): """ Read tables out of OpenDocument formatted files. Parameters ---------- filepath_or_buffer : string, path to be parsed or an open readable stream. storage_options : dict, optional passed to fsspec for appropriate URLs (see ``_get_filepath_or_buffer``) """ def __init__( self, filepath_or_buffer: FilePathOrBuffer, storage_options: StorageOptions = None, ): import_optional_dependency("odf") super().__init__(filepath_or_buffer, storage_options=storage_options) @property def _workbook_class(self): from odf.opendocument import OpenDocument return OpenDocument def load_workbook(self, filepath_or_buffer: FilePathOrBuffer): from odf.opendocument import load return load(filepath_or_buffer) @property def empty_value(self) -> str: """Property for compat with other readers.""" return "" @property def sheet_names(self) -> List[str]: """Return a list of sheet names present in the document""" from odf.table import Table tables = self.book.getElementsByType(Table) return [t.getAttribute("name") for t in tables] def get_sheet_by_index(self, index: int): from odf.table import Table tables = self.book.getElementsByType(Table) return tables[index] def get_sheet_by_name(self, name: str): from odf.table import Table tables = self.book.getElementsByType(Table) for table in tables: if table.getAttribute("name") == name: return table self.close() raise ValueError(f"sheet {name} not found") def get_sheet_data(self, sheet, convert_float: bool) -> List[List[Scalar]]: """ Parse an ODF Table into a list of lists """ from odf.table import CoveredTableCell, TableCell, TableRow covered_cell_name = CoveredTableCell().qname table_cell_name = TableCell().qname cell_names = {covered_cell_name, table_cell_name} sheet_rows = sheet.getElementsByType(TableRow) empty_rows = 0 max_row_len = 0 table: List[List[Scalar]] = [] for i, sheet_row in enumerate(sheet_rows): sheet_cells = [x for x in sheet_row.childNodes if x.qname in cell_names] empty_cells = 0 table_row: List[Scalar] = [] for j, sheet_cell in enumerate(sheet_cells): if sheet_cell.qname == table_cell_name: value = self._get_cell_value(sheet_cell, convert_float) else: value = self.empty_value column_repeat = self._get_column_repeat(sheet_cell) # Queue up empty values, writing only if content succeeds them if value == self.empty_value: empty_cells += column_repeat else: table_row.extend([self.empty_value] * empty_cells) empty_cells = 0 table_row.extend([value] * column_repeat) if max_row_len < len(table_row): max_row_len = len(table_row) row_repeat = self._get_row_repeat(sheet_row) if self._is_empty_row(sheet_row): empty_rows += row_repeat else: # add blank rows to our table table.extend([[self.empty_value]] * empty_rows) empty_rows = 0 for _ in range(row_repeat): table.append(table_row) # Make our table square for row in table: if len(row) < max_row_len: row.extend([self.empty_value] * (max_row_len - len(row))) return table def _get_row_repeat(self, row) -> int: """ Return number of times this row was repeated Repeating an empty row appeared to be a common way of representing sparse rows in the table. 
""" from odf.namespaces import TABLENS return int(row.attributes.get((TABLENS, "number-rows-repeated"), 1)) def _get_column_repeat(self, cell) -> int: from odf.namespaces import TABLENS return int(cell.attributes.get((TABLENS, "number-columns-repeated"), 1)) def _is_empty_row(self, row) -> bool: """ Helper function to find empty rows """ for column in row.childNodes: if len(column.childNodes) > 0: return False return True def _get_cell_value(self, cell, convert_float: bool) -> Scalar: from odf.namespaces import OFFICENS if str(cell) == "#N/A": return np.nan cell_type = cell.attributes.get((OFFICENS, "value-type")) if cell_type == "boolean": if str(cell) == "TRUE": return True return False if cell_type is None: return self.empty_value elif cell_type == "float": # GH5394 cell_value = float(cell.attributes.get((OFFICENS, "value"))) if convert_float: val = int(cell_value) if val == cell_value: return val return cell_value elif cell_type == "percentage": cell_value = cell.attributes.get((OFFICENS, "value")) return float(cell_value) elif cell_type == "string": return self._get_cell_string_value(cell) elif cell_type == "currency": cell_value = cell.attributes.get((OFFICENS, "value")) return float(cell_value) elif cell_type == "date": cell_value = cell.attributes.get((OFFICENS, "date-value")) return pd.to_datetime(cell_value) elif cell_type == "time": result = pd.to_datetime(str(cell)) result = cast(pd.Timestamp, result) return result.time() else: self.close() raise ValueError(f"Unrecognized type {cell_type}") def _get_cell_string_value(self, cell) -> str: """ Find and decode OpenDocument text:s tags that represent a run length encoded sequence of space characters. """ from odf.element import Element from odf.namespaces import TEXTNS from odf.text import S text_s = S().qname value = [] for fragment in cell.childNodes: if isinstance(fragment, Element): if fragment.qname == text_s: spaces = int(fragment.attributes.get((TEXTNS, "c"), 1)) value.append(" " * spaces) else: # recursive impl needed in case of nested fragments # with multiple spaces # https://github.com/pandas-dev/pandas/pull/36175#discussion_r484639704 value.append(self._get_cell_string_value(fragment)) else: value.append(str(fragment)) return "".join(value)
import numpy as np import pytest import pandas as pd from pandas import ( Categorical, DataFrame, DatetimeIndex, Index, Interval, IntervalIndex, Series, TimedeltaIndex, Timestamp, cut, date_range, isna, qcut, timedelta_range, to_datetime, ) import pandas._testing as tm from pandas.api.types import CategoricalDtype as CDT import pandas.core.reshape.tile as tmod def test_simple(): data = np.ones(5, dtype="int64") result = cut(data, 4, labels=False) expected = np.array([1, 1, 1, 1, 1]) tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_bins(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1]) result, bins = cut(data, 3, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) intervals = intervals.take([0, 0, 0, 1, 2, 0]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667, 6.53333333, 9.7])) def test_right(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575]) result, bins = cut(data, 4, right=True, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) expected = Categorical(intervals, ordered=True) expected = expected.take([0, 0, 0, 2, 3, 0, 0]) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 2.575, 4.95, 7.325, 9.7])) def test_no_right(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575]) result, bins = cut(data, 4, right=False, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3), closed="left") intervals = intervals.take([0, 0, 0, 2, 3, 0, 1]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.2, 2.575, 4.95, 7.325, 9.7095])) def test_array_like(): data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] result, bins = cut(data, 3, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) intervals = intervals.take([0, 0, 0, 1, 2, 0]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667, 6.53333333, 9.7])) def test_bins_from_interval_index(): c = cut(range(5), 3) expected = c result = cut(range(5), bins=expected.categories) tm.assert_categorical_equal(result, expected) expected = Categorical.from_codes( np.append(c.codes, -1), categories=c.categories, ordered=True ) result = cut(range(6), bins=expected.categories) tm.assert_categorical_equal(result, expected) def test_bins_from_interval_index_doc_example(): # Make sure we preserve the bins. 
ages = np.array([10, 15, 13, 12, 23, 25, 28, 59, 60]) c = cut(ages, bins=[0, 18, 35, 70]) expected = IntervalIndex.from_tuples([(0, 18), (18, 35), (35, 70)]) tm.assert_index_equal(c.categories, expected) result = cut([25, 20, 50], bins=c.categories) tm.assert_index_equal(result.categories, expected) tm.assert_numpy_array_equal(result.codes, np.array([1, 1, 2], dtype="int8")) def test_bins_not_overlapping_from_interval_index(): # see gh-23980 msg = "Overlapping IntervalIndex is not accepted" ii = IntervalIndex.from_tuples([(0, 10), (2, 12), (4, 14)]) with pytest.raises(ValueError, match=msg): cut([5, 6], bins=ii) def test_bins_not_monotonic(): msg = "bins must increase monotonically" data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] with pytest.raises(ValueError, match=msg): cut(data, [0.1, 1.5, 1, 10]) @pytest.mark.parametrize( "x, bins, expected", [ ( date_range("2017-12-31", periods=3), [Timestamp.min, Timestamp("2018-01-01"), Timestamp.max], IntervalIndex.from_tuples( [ (Timestamp.min, Timestamp("2018-01-01")), (Timestamp("2018-01-01"), Timestamp.max), ] ), ), ( [-1, 0, 1], np.array( [np.iinfo(np.int64).min, 0, np.iinfo(np.int64).max], dtype="int64" ), IntervalIndex.from_tuples( [(np.iinfo(np.int64).min, 0), (0, np.iinfo(np.int64).max)] ), ), ( [np.timedelta64(-1), np.timedelta64(0), np.timedelta64(1)], np.array( [ np.timedelta64(-np.iinfo(np.int64).max), np.timedelta64(0), np.timedelta64(np.iinfo(np.int64).max), ] ), IntervalIndex.from_tuples( [ (np.timedelta64(-np.iinfo(np.int64).max), np.timedelta64(0)), (np.timedelta64(0), np.timedelta64(np.iinfo(np.int64).max)), ] ), ), ], ) def test_bins_monotonic_not_overflowing(x, bins, expected): # GH 26045 result = cut(x, bins) tm.assert_index_equal(result.categories, expected) def test_wrong_num_labels(): msg = "Bin labels must be one fewer than the number of bin edges" data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] with pytest.raises(ValueError, match=msg): cut(data, [0, 1, 10], labels=["foo", "bar", "baz"]) @pytest.mark.parametrize( "x,bins,msg", [ ([], 2, "Cannot cut empty array"), ([1, 2, 3], 0.5, "`bins` should be a positive integer"), ], ) def test_cut_corner(x, bins, msg): with pytest.raises(ValueError, match=msg): cut(x, bins) @pytest.mark.parametrize("arg", [2, np.eye(2), DataFrame(np.eye(2))]) @pytest.mark.parametrize("cut_func", [cut, qcut]) def test_cut_not_1d_arg(arg, cut_func): msg = "Input array must be 1 dimensional" with pytest.raises(ValueError, match=msg): cut_func(arg, 2) @pytest.mark.parametrize( "data", [ [0, 1, 2, 3, 4, np.inf], [-np.inf, 0, 1, 2, 3, 4], [-np.inf, 0, 1, 2, 3, 4, np.inf], ], ) def test_int_bins_with_inf(data): # GH 24314 msg = "cannot specify integer `bins` when input data contains infinity" with pytest.raises(ValueError, match=msg): cut(data, bins=3) def test_cut_out_of_range_more(): # see gh-1511 name = "x" ser = Series([0, -1, 0, 1, -3], name=name) ind = cut(ser, [0, 1], labels=False) exp = Series([np.nan, np.nan, np.nan, 0, np.nan], name=name) tm.assert_series_equal(ind, exp) @pytest.mark.parametrize( "right,breaks,closed", [ (True, [-1e-3, 0.25, 0.5, 0.75, 1], "right"), (False, [0, 0.25, 0.5, 0.75, 1 + 1e-3], "left"), ], ) def test_labels(right, breaks, closed): arr = np.tile(np.arange(0, 1.01, 0.1), 4) result, bins = cut(arr, 4, retbins=True, right=right) ex_levels = IntervalIndex.from_breaks(breaks, closed=closed) tm.assert_index_equal(result.categories, ex_levels) def test_cut_pass_series_name_to_factor(): name = "foo" ser = Series(np.random.randn(100), name=name) factor = cut(ser, 4) assert factor.name == name def 
test_label_precision(): arr = np.arange(0, 0.73, 0.01) result = cut(arr, 4, precision=2) ex_levels = IntervalIndex.from_breaks([-0.00072, 0.18, 0.36, 0.54, 0.72]) tm.assert_index_equal(result.categories, ex_levels) @pytest.mark.parametrize("labels", [None, False]) def test_na_handling(labels): arr = np.arange(0, 0.75, 0.01) arr[::3] = np.nan result = cut(arr, 4, labels=labels) result = np.asarray(result) expected = np.where(isna(arr), np.nan, result) tm.assert_almost_equal(result, expected) def test_inf_handling(): data = np.arange(6) data_ser = Series(data, dtype="int64") bins = [-np.inf, 2, 4, np.inf] result = cut(data, bins) result_ser = cut(data_ser, bins) ex_uniques = IntervalIndex.from_breaks(bins) tm.assert_index_equal(result.categories, ex_uniques) assert result[5] == Interval(4, np.inf) assert result[0] == Interval(-np.inf, 2) assert result_ser[5] == Interval(4, np.inf) assert result_ser[0] == Interval(-np.inf, 2) def test_cut_out_of_bounds(): arr = np.random.randn(100) result = cut(arr, [-1, 0, 1]) mask = isna(result) ex_mask = (arr < -1) | (arr > 1) tm.assert_numpy_array_equal(mask, ex_mask) @pytest.mark.parametrize( "get_labels,get_expected", [ ( lambda labels: labels, lambda labels: Categorical( ["Medium"] + 4 * ["Small"] + ["Medium", "Large"], categories=labels, ordered=True, ), ), ( lambda labels: Categorical.from_codes([0, 1, 2], labels), lambda labels: Categorical.from_codes([1] + 4 * [0] + [1, 2], labels), ), ], ) def test_cut_pass_labels(get_labels, get_expected): bins = [0, 25, 50, 100] arr = [50, 5, 10, 15, 20, 30, 70] labels = ["Small", "Medium", "Large"] result = cut(arr, bins, labels=get_labels(labels)) tm.assert_categorical_equal(result, get_expected(labels)) def test_cut_pass_labels_compat(): # see gh-16459 arr = [50, 5, 10, 15, 20, 30, 70] labels = ["Good", "Medium", "Bad"] result = cut(arr, 3, labels=labels) exp = cut(arr, 3, labels=Categorical(labels, categories=labels, ordered=True)) tm.assert_categorical_equal(result, exp) @pytest.mark.parametrize("x", [np.arange(11.0), np.arange(11.0) / 1e10]) def test_round_frac_just_works(x): # It works. 
cut(x, 2) @pytest.mark.parametrize( "val,precision,expected", [ (-117.9998, 3, -118), (117.9998, 3, 118), (117.9998, 2, 118), (0.000123456, 2, 0.00012), ], ) def test_round_frac(val, precision, expected): # see gh-1979 result = tmod._round_frac(val, precision=precision) assert result == expected def test_cut_return_intervals(): ser = Series([0, 1, 2, 3, 4, 5, 6, 7, 8]) result = cut(ser, 3) exp_bins = np.linspace(0, 8, num=4).round(3) exp_bins[0] -= 0.008 expected = Series( IntervalIndex.from_breaks(exp_bins, closed="right").take( [0, 0, 0, 1, 1, 1, 2, 2, 2] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) def test_series_ret_bins(): # see gh-8589 ser = Series(np.arange(4)) result, bins = cut(ser, 2, retbins=True) expected = Series( IntervalIndex.from_breaks([-0.003, 1.5, 3], closed="right").repeat(2) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "kwargs,msg", [ ({"duplicates": "drop"}, None), ({}, "Bin edges must be unique"), ({"duplicates": "raise"}, "Bin edges must be unique"), ({"duplicates": "foo"}, "invalid value for 'duplicates' parameter"), ], ) def test_cut_duplicates_bin(kwargs, msg): # see gh-20947 bins = [0, 2, 4, 6, 10, 10] values = Series(np.array([1, 3, 5, 7, 9]), index=["a", "b", "c", "d", "e"]) if msg is not None: with pytest.raises(ValueError, match=msg): cut(values, bins, **kwargs) else: result = cut(values, bins, **kwargs) expected = cut(values, pd.unique(bins)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize("data", [9.0, -9.0, 0.0]) @pytest.mark.parametrize("length", [1, 2]) def test_single_bin(data, length): # see gh-14652, gh-15428 ser = Series([data] * length) result = cut(ser, 1, labels=False) expected = Series([0] * length) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "array_1_writeable,array_2_writeable", [(True, True), (True, False), (False, False)] ) def test_cut_read_only(array_1_writeable, array_2_writeable): # issue 18773 array_1 = np.arange(0, 100, 10) array_1.flags.writeable = array_1_writeable array_2 = np.arange(0, 100, 10) array_2.flags.writeable = array_2_writeable hundred_elements = np.arange(100) tm.assert_categorical_equal( cut(hundred_elements, array_1), cut(hundred_elements, array_2) ) @pytest.mark.parametrize( "conv", [ lambda v: Timestamp(v), lambda v: to_datetime(v), lambda v: np.datetime64(v), lambda v: Timestamp(v).to_pydatetime(), ], ) def test_datetime_bin(conv): data = [np.datetime64("2012-12-13"), np.datetime64("2012-12-15")] bin_data = ["2012-12-12", "2012-12-14", "2012-12-16"] expected = Series( IntervalIndex( [ Interval(Timestamp(bin_data[0]), Timestamp(bin_data[1])), Interval(Timestamp(bin_data[1]), Timestamp(bin_data[2])), ] ) ).astype(CDT(ordered=True)) bins = [conv(v) for v in bin_data] result = Series(cut(data, bins=bins)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "data", [ to_datetime(Series(["2013-01-01", "2013-01-02", "2013-01-03"])), [ np.datetime64("2013-01-01"), np.datetime64("2013-01-02"), np.datetime64("2013-01-03"), ], np.array( [ np.datetime64("2013-01-01"), np.datetime64("2013-01-02"), np.datetime64("2013-01-03"), ] ), DatetimeIndex(["2013-01-01", "2013-01-02", "2013-01-03"]), ], ) def test_datetime_cut(data): # see gh-14714 # # Testing time data when it comes in various collection types. 
result, _ = cut(data, 3, retbins=True) expected = Series( IntervalIndex( [ Interval( Timestamp("2012-12-31 23:57:07.200000"), Timestamp("2013-01-01 16:00:00"), ), Interval( Timestamp("2013-01-01 16:00:00"), Timestamp("2013-01-02 08:00:00") ), Interval( Timestamp("2013-01-02 08:00:00"), Timestamp("2013-01-03 00:00:00") ), ] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(Series(result), expected) @pytest.mark.parametrize( "bins", [ 3, [ Timestamp("2013-01-01 04:57:07.200000"), Timestamp("2013-01-01 21:00:00"), Timestamp("2013-01-02 13:00:00"), Timestamp("2013-01-03 05:00:00"), ], ], ) @pytest.mark.parametrize("box", [list, np.array, Index, Series]) def test_datetime_tz_cut(bins, box): # see gh-19872 tz = "US/Eastern" s = Series(date_range("20130101", periods=3, tz=tz)) if not isinstance(bins, int): bins = box(bins) result = cut(s, bins) expected = Series( IntervalIndex( [ Interval( Timestamp("2012-12-31 23:57:07.200000", tz=tz), Timestamp("2013-01-01 16:00:00", tz=tz), ), Interval( Timestamp("2013-01-01 16:00:00", tz=tz), Timestamp("2013-01-02 08:00:00", tz=tz), ), Interval( Timestamp("2013-01-02 08:00:00", tz=tz), Timestamp("2013-01-03 00:00:00", tz=tz), ), ] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) def test_datetime_nan_error(): msg = "bins must be of datetime64 dtype" with pytest.raises(ValueError, match=msg): cut(date_range("20130101", periods=3), bins=[0, 2, 4]) def test_datetime_nan_mask(): result = cut( date_range("20130102", periods=5), bins=date_range("20130101", periods=2) ) mask = result.categories.isna() tm.assert_numpy_array_equal(mask, np.array([False])) mask = result.isna() tm.assert_numpy_array_equal(mask, np.array([False, True, True, True, True])) @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"]) def test_datetime_cut_roundtrip(tz): # see gh-19891 ser = Series(date_range("20180101", periods=3, tz=tz)) result, result_bins = cut(ser, 2, retbins=True) expected = cut(ser, result_bins) tm.assert_series_equal(result, expected) expected_bins = DatetimeIndex( ["2017-12-31 23:57:07.200000", "2018-01-02 00:00:00", "2018-01-03 00:00:00"] ) expected_bins = expected_bins.tz_localize(tz) tm.assert_index_equal(result_bins, expected_bins) def test_timedelta_cut_roundtrip(): # see gh-19891 ser = Series(timedelta_range("1day", periods=3)) result, result_bins = cut(ser, 2, retbins=True) expected = cut(ser, result_bins) tm.assert_series_equal(result, expected) expected_bins = TimedeltaIndex( ["0 days 23:57:07.200000", "2 days 00:00:00", "3 days 00:00:00"] ) tm.assert_index_equal(result_bins, expected_bins) @pytest.mark.parametrize("bins", [6, 7]) @pytest.mark.parametrize( "box, compare", [ (Series, tm.assert_series_equal), (np.array, tm.assert_categorical_equal), (list, tm.assert_equal), ], ) def test_cut_bool_coercion_to_int(bins, box, compare): # issue 20303 data_expected = box([0, 1, 1, 0, 1] * 10) data_result = box([False, True, True, False, True] * 10) expected = cut(data_expected, bins, duplicates="drop") result = cut(data_result, bins, duplicates="drop") compare(result, expected) @pytest.mark.parametrize("labels", ["foo", 1, True]) def test_cut_incorrect_labels(labels): # GH 13318 values = range(5) msg = "Bin labels must either be False, None or passed in as a list-like argument" with pytest.raises(ValueError, match=msg): cut(values, 4, labels=labels) @pytest.mark.parametrize("bins", [3, [0, 5, 15]]) @pytest.mark.parametrize("right", [True, False]) @pytest.mark.parametrize("include_lowest", [True, False]) def test_cut_nullable_integer(bins, 
right, include_lowest): a = np.random.randint(0, 10, size=50).astype(float) a[::2] = np.nan result = cut( pd.array(a, dtype="Int64"), bins, right=right, include_lowest=include_lowest ) expected = cut(a, bins, right=right, include_lowest=include_lowest) tm.assert_categorical_equal(result, expected) @pytest.mark.parametrize( "data, bins, labels, expected_codes, expected_labels", [ ([15, 17, 19], [14, 16, 18, 20], ["A", "B", "A"], [0, 1, 0], ["A", "B"]), ([1, 3, 5], [0, 2, 4, 6, 8], [2, 0, 1, 2], [2, 0, 1], [0, 1, 2]), ], ) def test_cut_non_unique_labels(data, bins, labels, expected_codes, expected_labels): # GH 33141 result = cut(data, bins=bins, labels=labels, ordered=False) expected = Categorical.from_codes( expected_codes, categories=expected_labels, ordered=False ) tm.assert_categorical_equal(result, expected) @pytest.mark.parametrize( "data, bins, labels, expected_codes, expected_labels", [ ([15, 17, 19], [14, 16, 18, 20], ["C", "B", "A"], [0, 1, 2], ["C", "B", "A"]), ([1, 3, 5], [0, 2, 4, 6, 8], [3, 0, 1, 2], [0, 1, 2], [3, 0, 1, 2]), ], ) def test_cut_unordered_labels(data, bins, labels, expected_codes, expected_labels): # GH 33141 result = cut(data, bins=bins, labels=labels, ordered=False) expected = Categorical.from_codes( expected_codes, categories=expected_labels, ordered=False ) tm.assert_categorical_equal(result, expected) def test_cut_unordered_with_missing_labels_raises_error(): # GH 33141 msg = "'labels' must be provided if 'ordered = False'" with pytest.raises(ValueError, match=msg): cut([0.5, 3], bins=[0, 1, 2], ordered=False) def test_cut_unordered_with_series_labels(): # https://github.com/pandas-dev/pandas/issues/36603 s = Series([1, 2, 3, 4, 5]) bins = Series([0, 2, 4, 6]) labels = Series(["a", "b", "c"]) result = pd.cut(s, bins=bins, labels=labels, ordered=False) expected = Series(["a", "a", "b", "b", "c"], dtype="category") tm.assert_series_equal(result, expected) def test_cut_no_warnings(): df = DataFrame({"value": np.random.randint(0, 100, 20)}) labels = [f"{i} - {i + 9}" for i in range(0, 100, 10)] with tm.assert_produces_warning(False): df["group"] = pd.cut(df.value, range(0, 105, 10), right=False, labels=labels)
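For orientation, a tiny standalone example of the cut() behavior these tests pin down: bins are right-closed IntervalIndex categories by default, and explicit labels can replace them.

import pandas as pd

binned = pd.cut([1, 7, 5, 4, 6, 3], bins=3)
print(binned.categories)  # three equal-width, right-closed intervals
print(pd.cut([1, 7, 5], bins=[0, 3, 6, 9], labels=["low", "mid", "high"]))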
jreback/pandas
pandas/tests/reshape/test_cut.py
pandas/io/excel/_odfreader.py
""" Helpers for configuring locale settings. Name `localization` is chosen to avoid overlap with builtin `locale` module. """ from contextlib import contextmanager import locale import re import subprocess from pandas._config.config import options @contextmanager def set_locale(new_locale, lc_var: int = locale.LC_ALL): """ Context manager for temporarily setting a locale. Parameters ---------- new_locale : str or tuple A string of the form <language_country>.<encoding>. For example to set the current locale to US English with a UTF8 encoding, you would pass "en_US.UTF-8". lc_var : int, default `locale.LC_ALL` The category of the locale being set. Notes ----- This is useful when you want to run a particular block of code under a particular locale, without globally setting the locale. This probably isn't thread-safe. """ current_locale = locale.getlocale() try: locale.setlocale(lc_var, new_locale) normalized_locale = locale.getlocale() if all(x is not None for x in normalized_locale): yield ".".join(normalized_locale) else: yield new_locale finally: locale.setlocale(lc_var, current_locale) def can_set_locale(lc: str, lc_var: int = locale.LC_ALL) -> bool: """ Check to see if we can set a locale, and subsequently get the locale, without raising an Exception. Parameters ---------- lc : str The locale to attempt to set. lc_var : int, default `locale.LC_ALL` The category of the locale being set. Returns ------- bool Whether the passed locale can be set """ try: with set_locale(lc, lc_var=lc_var): pass except (ValueError, locale.Error): # horrible name for a Exception subclass return False else: return True def _valid_locales(locales, normalize): """ Return a list of normalized locales that do not throw an ``Exception`` when set. Parameters ---------- locales : str A string where each locale is separated by a newline. normalize : bool Whether to call ``locale.normalize`` on each locale. Returns ------- valid_locales : list A list of valid locales. """ return [ loc for loc in ( locale.normalize(loc.strip()) if normalize else loc.strip() for loc in locales ) if can_set_locale(loc) ] def _default_locale_getter(): return subprocess.check_output(["locale -a"], shell=True) def get_locales(prefix=None, normalize=True, locale_getter=_default_locale_getter): """ Get all the locales that are available on the system. Parameters ---------- prefix : str If not ``None`` then return only those locales with the prefix provided. For example to get all English language locales (those that start with ``"en"``), pass ``prefix="en"``. normalize : bool Call ``locale.normalize`` on the resulting list of available locales. If ``True``, only locales that can be set without throwing an ``Exception`` are returned. locale_getter : callable The function to use to retrieve the current locales. This should return a string with each locale separated by a newline character. Returns ------- locales : list of strings A list of locale strings that can be set with ``locale.setlocale()``. For example:: locale.setlocale(locale.LC_ALL, locale_string) On error will return None (no locale available, e.g. Windows) """ try: raw_locales = locale_getter() except subprocess.CalledProcessError: # Raised on (some? all?) Windows platforms because Note: "locale -a" # is not defined return None try: # raw_locales is "\n" separated list of locales # it may contain non-decodable parts, so split # extract what we can and then rejoin. 
raw_locales = raw_locales.split(b"\n") out_locales = [] for x in raw_locales: try: out_locales.append(str(x, encoding=options.display.encoding)) except UnicodeError: # 'locale -a' is used to populated 'raw_locales' and on # Redhat 7 Linux (and maybe others) prints locale names # using windows-1252 encoding. Bug only triggered by # a few special characters and when there is an # extensive list of installed locales. out_locales.append(str(x, encoding="windows-1252")) except TypeError: pass if prefix is None: return _valid_locales(out_locales, normalize) pattern = re.compile(f"{prefix}.*") found = pattern.findall("\n".join(out_locales)) return _valid_locales(found, normalize)
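A small sketch of how these helpers compose; the locale name is an assumption and availability is platform-dependent.

from pandas._config.localization import can_set_locale, get_locales, set_locale

if can_set_locale("de_DE.UTF-8"):
    with set_locale("de_DE.UTF-8") as loc:
        # the process-wide locale is switched only inside this block
        print("running under", loc)

english = get_locales(prefix="en")  # None on platforms without `locale -a`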
import numpy as np import pytest import pandas as pd from pandas import ( Categorical, DataFrame, DatetimeIndex, Index, Interval, IntervalIndex, Series, TimedeltaIndex, Timestamp, cut, date_range, isna, qcut, timedelta_range, to_datetime, ) import pandas._testing as tm from pandas.api.types import CategoricalDtype as CDT import pandas.core.reshape.tile as tmod def test_simple(): data = np.ones(5, dtype="int64") result = cut(data, 4, labels=False) expected = np.array([1, 1, 1, 1, 1]) tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_bins(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1]) result, bins = cut(data, 3, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) intervals = intervals.take([0, 0, 0, 1, 2, 0]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667, 6.53333333, 9.7])) def test_right(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575]) result, bins = cut(data, 4, right=True, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) expected = Categorical(intervals, ordered=True) expected = expected.take([0, 0, 0, 2, 3, 0, 0]) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 2.575, 4.95, 7.325, 9.7])) def test_no_right(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575]) result, bins = cut(data, 4, right=False, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3), closed="left") intervals = intervals.take([0, 0, 0, 2, 3, 0, 1]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.2, 2.575, 4.95, 7.325, 9.7095])) def test_array_like(): data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] result, bins = cut(data, 3, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) intervals = intervals.take([0, 0, 0, 1, 2, 0]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667, 6.53333333, 9.7])) def test_bins_from_interval_index(): c = cut(range(5), 3) expected = c result = cut(range(5), bins=expected.categories) tm.assert_categorical_equal(result, expected) expected = Categorical.from_codes( np.append(c.codes, -1), categories=c.categories, ordered=True ) result = cut(range(6), bins=expected.categories) tm.assert_categorical_equal(result, expected) def test_bins_from_interval_index_doc_example(): # Make sure we preserve the bins. 
ages = np.array([10, 15, 13, 12, 23, 25, 28, 59, 60]) c = cut(ages, bins=[0, 18, 35, 70]) expected = IntervalIndex.from_tuples([(0, 18), (18, 35), (35, 70)]) tm.assert_index_equal(c.categories, expected) result = cut([25, 20, 50], bins=c.categories) tm.assert_index_equal(result.categories, expected) tm.assert_numpy_array_equal(result.codes, np.array([1, 1, 2], dtype="int8")) def test_bins_not_overlapping_from_interval_index(): # see gh-23980 msg = "Overlapping IntervalIndex is not accepted" ii = IntervalIndex.from_tuples([(0, 10), (2, 12), (4, 14)]) with pytest.raises(ValueError, match=msg): cut([5, 6], bins=ii) def test_bins_not_monotonic(): msg = "bins must increase monotonically" data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] with pytest.raises(ValueError, match=msg): cut(data, [0.1, 1.5, 1, 10]) @pytest.mark.parametrize( "x, bins, expected", [ ( date_range("2017-12-31", periods=3), [Timestamp.min, Timestamp("2018-01-01"), Timestamp.max], IntervalIndex.from_tuples( [ (Timestamp.min, Timestamp("2018-01-01")), (Timestamp("2018-01-01"), Timestamp.max), ] ), ), ( [-1, 0, 1], np.array( [np.iinfo(np.int64).min, 0, np.iinfo(np.int64).max], dtype="int64" ), IntervalIndex.from_tuples( [(np.iinfo(np.int64).min, 0), (0, np.iinfo(np.int64).max)] ), ), ( [np.timedelta64(-1), np.timedelta64(0), np.timedelta64(1)], np.array( [ np.timedelta64(-np.iinfo(np.int64).max), np.timedelta64(0), np.timedelta64(np.iinfo(np.int64).max), ] ), IntervalIndex.from_tuples( [ (np.timedelta64(-np.iinfo(np.int64).max), np.timedelta64(0)), (np.timedelta64(0), np.timedelta64(np.iinfo(np.int64).max)), ] ), ), ], ) def test_bins_monotonic_not_overflowing(x, bins, expected): # GH 26045 result = cut(x, bins) tm.assert_index_equal(result.categories, expected) def test_wrong_num_labels(): msg = "Bin labels must be one fewer than the number of bin edges" data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] with pytest.raises(ValueError, match=msg): cut(data, [0, 1, 10], labels=["foo", "bar", "baz"]) @pytest.mark.parametrize( "x,bins,msg", [ ([], 2, "Cannot cut empty array"), ([1, 2, 3], 0.5, "`bins` should be a positive integer"), ], ) def test_cut_corner(x, bins, msg): with pytest.raises(ValueError, match=msg): cut(x, bins) @pytest.mark.parametrize("arg", [2, np.eye(2), DataFrame(np.eye(2))]) @pytest.mark.parametrize("cut_func", [cut, qcut]) def test_cut_not_1d_arg(arg, cut_func): msg = "Input array must be 1 dimensional" with pytest.raises(ValueError, match=msg): cut_func(arg, 2) @pytest.mark.parametrize( "data", [ [0, 1, 2, 3, 4, np.inf], [-np.inf, 0, 1, 2, 3, 4], [-np.inf, 0, 1, 2, 3, 4, np.inf], ], ) def test_int_bins_with_inf(data): # GH 24314 msg = "cannot specify integer `bins` when input data contains infinity" with pytest.raises(ValueError, match=msg): cut(data, bins=3) def test_cut_out_of_range_more(): # see gh-1511 name = "x" ser = Series([0, -1, 0, 1, -3], name=name) ind = cut(ser, [0, 1], labels=False) exp = Series([np.nan, np.nan, np.nan, 0, np.nan], name=name) tm.assert_series_equal(ind, exp) @pytest.mark.parametrize( "right,breaks,closed", [ (True, [-1e-3, 0.25, 0.5, 0.75, 1], "right"), (False, [0, 0.25, 0.5, 0.75, 1 + 1e-3], "left"), ], ) def test_labels(right, breaks, closed): arr = np.tile(np.arange(0, 1.01, 0.1), 4) result, bins = cut(arr, 4, retbins=True, right=right) ex_levels = IntervalIndex.from_breaks(breaks, closed=closed) tm.assert_index_equal(result.categories, ex_levels) def test_cut_pass_series_name_to_factor(): name = "foo" ser = Series(np.random.randn(100), name=name) factor = cut(ser, 4) assert factor.name == name def 
test_label_precision(): arr = np.arange(0, 0.73, 0.01) result = cut(arr, 4, precision=2) ex_levels = IntervalIndex.from_breaks([-0.00072, 0.18, 0.36, 0.54, 0.72]) tm.assert_index_equal(result.categories, ex_levels) @pytest.mark.parametrize("labels", [None, False]) def test_na_handling(labels): arr = np.arange(0, 0.75, 0.01) arr[::3] = np.nan result = cut(arr, 4, labels=labels) result = np.asarray(result) expected = np.where(isna(arr), np.nan, result) tm.assert_almost_equal(result, expected) def test_inf_handling(): data = np.arange(6) data_ser = Series(data, dtype="int64") bins = [-np.inf, 2, 4, np.inf] result = cut(data, bins) result_ser = cut(data_ser, bins) ex_uniques = IntervalIndex.from_breaks(bins) tm.assert_index_equal(result.categories, ex_uniques) assert result[5] == Interval(4, np.inf) assert result[0] == Interval(-np.inf, 2) assert result_ser[5] == Interval(4, np.inf) assert result_ser[0] == Interval(-np.inf, 2) def test_cut_out_of_bounds(): arr = np.random.randn(100) result = cut(arr, [-1, 0, 1]) mask = isna(result) ex_mask = (arr < -1) | (arr > 1) tm.assert_numpy_array_equal(mask, ex_mask) @pytest.mark.parametrize( "get_labels,get_expected", [ ( lambda labels: labels, lambda labels: Categorical( ["Medium"] + 4 * ["Small"] + ["Medium", "Large"], categories=labels, ordered=True, ), ), ( lambda labels: Categorical.from_codes([0, 1, 2], labels), lambda labels: Categorical.from_codes([1] + 4 * [0] + [1, 2], labels), ), ], ) def test_cut_pass_labels(get_labels, get_expected): bins = [0, 25, 50, 100] arr = [50, 5, 10, 15, 20, 30, 70] labels = ["Small", "Medium", "Large"] result = cut(arr, bins, labels=get_labels(labels)) tm.assert_categorical_equal(result, get_expected(labels)) def test_cut_pass_labels_compat(): # see gh-16459 arr = [50, 5, 10, 15, 20, 30, 70] labels = ["Good", "Medium", "Bad"] result = cut(arr, 3, labels=labels) exp = cut(arr, 3, labels=Categorical(labels, categories=labels, ordered=True)) tm.assert_categorical_equal(result, exp) @pytest.mark.parametrize("x", [np.arange(11.0), np.arange(11.0) / 1e10]) def test_round_frac_just_works(x): # It works. 
cut(x, 2) @pytest.mark.parametrize( "val,precision,expected", [ (-117.9998, 3, -118), (117.9998, 3, 118), (117.9998, 2, 118), (0.000123456, 2, 0.00012), ], ) def test_round_frac(val, precision, expected): # see gh-1979 result = tmod._round_frac(val, precision=precision) assert result == expected def test_cut_return_intervals(): ser = Series([0, 1, 2, 3, 4, 5, 6, 7, 8]) result = cut(ser, 3) exp_bins = np.linspace(0, 8, num=4).round(3) exp_bins[0] -= 0.008 expected = Series( IntervalIndex.from_breaks(exp_bins, closed="right").take( [0, 0, 0, 1, 1, 1, 2, 2, 2] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) def test_series_ret_bins(): # see gh-8589 ser = Series(np.arange(4)) result, bins = cut(ser, 2, retbins=True) expected = Series( IntervalIndex.from_breaks([-0.003, 1.5, 3], closed="right").repeat(2) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "kwargs,msg", [ ({"duplicates": "drop"}, None), ({}, "Bin edges must be unique"), ({"duplicates": "raise"}, "Bin edges must be unique"), ({"duplicates": "foo"}, "invalid value for 'duplicates' parameter"), ], ) def test_cut_duplicates_bin(kwargs, msg): # see gh-20947 bins = [0, 2, 4, 6, 10, 10] values = Series(np.array([1, 3, 5, 7, 9]), index=["a", "b", "c", "d", "e"]) if msg is not None: with pytest.raises(ValueError, match=msg): cut(values, bins, **kwargs) else: result = cut(values, bins, **kwargs) expected = cut(values, pd.unique(bins)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize("data", [9.0, -9.0, 0.0]) @pytest.mark.parametrize("length", [1, 2]) def test_single_bin(data, length): # see gh-14652, gh-15428 ser = Series([data] * length) result = cut(ser, 1, labels=False) expected = Series([0] * length) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "array_1_writeable,array_2_writeable", [(True, True), (True, False), (False, False)] ) def test_cut_read_only(array_1_writeable, array_2_writeable): # issue 18773 array_1 = np.arange(0, 100, 10) array_1.flags.writeable = array_1_writeable array_2 = np.arange(0, 100, 10) array_2.flags.writeable = array_2_writeable hundred_elements = np.arange(100) tm.assert_categorical_equal( cut(hundred_elements, array_1), cut(hundred_elements, array_2) ) @pytest.mark.parametrize( "conv", [ lambda v: Timestamp(v), lambda v: to_datetime(v), lambda v: np.datetime64(v), lambda v: Timestamp(v).to_pydatetime(), ], ) def test_datetime_bin(conv): data = [np.datetime64("2012-12-13"), np.datetime64("2012-12-15")] bin_data = ["2012-12-12", "2012-12-14", "2012-12-16"] expected = Series( IntervalIndex( [ Interval(Timestamp(bin_data[0]), Timestamp(bin_data[1])), Interval(Timestamp(bin_data[1]), Timestamp(bin_data[2])), ] ) ).astype(CDT(ordered=True)) bins = [conv(v) for v in bin_data] result = Series(cut(data, bins=bins)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "data", [ to_datetime(Series(["2013-01-01", "2013-01-02", "2013-01-03"])), [ np.datetime64("2013-01-01"), np.datetime64("2013-01-02"), np.datetime64("2013-01-03"), ], np.array( [ np.datetime64("2013-01-01"), np.datetime64("2013-01-02"), np.datetime64("2013-01-03"), ] ), DatetimeIndex(["2013-01-01", "2013-01-02", "2013-01-03"]), ], ) def test_datetime_cut(data): # see gh-14714 # # Testing time data when it comes in various collection types. 
result, _ = cut(data, 3, retbins=True) expected = Series( IntervalIndex( [ Interval( Timestamp("2012-12-31 23:57:07.200000"), Timestamp("2013-01-01 16:00:00"), ), Interval( Timestamp("2013-01-01 16:00:00"), Timestamp("2013-01-02 08:00:00") ), Interval( Timestamp("2013-01-02 08:00:00"), Timestamp("2013-01-03 00:00:00") ), ] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(Series(result), expected) @pytest.mark.parametrize( "bins", [ 3, [ Timestamp("2013-01-01 04:57:07.200000"), Timestamp("2013-01-01 21:00:00"), Timestamp("2013-01-02 13:00:00"), Timestamp("2013-01-03 05:00:00"), ], ], ) @pytest.mark.parametrize("box", [list, np.array, Index, Series]) def test_datetime_tz_cut(bins, box): # see gh-19872 tz = "US/Eastern" s = Series(date_range("20130101", periods=3, tz=tz)) if not isinstance(bins, int): bins = box(bins) result = cut(s, bins) expected = Series( IntervalIndex( [ Interval( Timestamp("2012-12-31 23:57:07.200000", tz=tz), Timestamp("2013-01-01 16:00:00", tz=tz), ), Interval( Timestamp("2013-01-01 16:00:00", tz=tz), Timestamp("2013-01-02 08:00:00", tz=tz), ), Interval( Timestamp("2013-01-02 08:00:00", tz=tz), Timestamp("2013-01-03 00:00:00", tz=tz), ), ] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) def test_datetime_nan_error(): msg = "bins must be of datetime64 dtype" with pytest.raises(ValueError, match=msg): cut(date_range("20130101", periods=3), bins=[0, 2, 4]) def test_datetime_nan_mask(): result = cut( date_range("20130102", periods=5), bins=date_range("20130101", periods=2) ) mask = result.categories.isna() tm.assert_numpy_array_equal(mask, np.array([False])) mask = result.isna() tm.assert_numpy_array_equal(mask, np.array([False, True, True, True, True])) @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"]) def test_datetime_cut_roundtrip(tz): # see gh-19891 ser = Series(date_range("20180101", periods=3, tz=tz)) result, result_bins = cut(ser, 2, retbins=True) expected = cut(ser, result_bins) tm.assert_series_equal(result, expected) expected_bins = DatetimeIndex( ["2017-12-31 23:57:07.200000", "2018-01-02 00:00:00", "2018-01-03 00:00:00"] ) expected_bins = expected_bins.tz_localize(tz) tm.assert_index_equal(result_bins, expected_bins) def test_timedelta_cut_roundtrip(): # see gh-19891 ser = Series(timedelta_range("1day", periods=3)) result, result_bins = cut(ser, 2, retbins=True) expected = cut(ser, result_bins) tm.assert_series_equal(result, expected) expected_bins = TimedeltaIndex( ["0 days 23:57:07.200000", "2 days 00:00:00", "3 days 00:00:00"] ) tm.assert_index_equal(result_bins, expected_bins) @pytest.mark.parametrize("bins", [6, 7]) @pytest.mark.parametrize( "box, compare", [ (Series, tm.assert_series_equal), (np.array, tm.assert_categorical_equal), (list, tm.assert_equal), ], ) def test_cut_bool_coercion_to_int(bins, box, compare): # issue 20303 data_expected = box([0, 1, 1, 0, 1] * 10) data_result = box([False, True, True, False, True] * 10) expected = cut(data_expected, bins, duplicates="drop") result = cut(data_result, bins, duplicates="drop") compare(result, expected) @pytest.mark.parametrize("labels", ["foo", 1, True]) def test_cut_incorrect_labels(labels): # GH 13318 values = range(5) msg = "Bin labels must either be False, None or passed in as a list-like argument" with pytest.raises(ValueError, match=msg): cut(values, 4, labels=labels) @pytest.mark.parametrize("bins", [3, [0, 5, 15]]) @pytest.mark.parametrize("right", [True, False]) @pytest.mark.parametrize("include_lowest", [True, False]) def test_cut_nullable_integer(bins, 
right, include_lowest): a = np.random.randint(0, 10, size=50).astype(float) a[::2] = np.nan result = cut( pd.array(a, dtype="Int64"), bins, right=right, include_lowest=include_lowest ) expected = cut(a, bins, right=right, include_lowest=include_lowest) tm.assert_categorical_equal(result, expected) @pytest.mark.parametrize( "data, bins, labels, expected_codes, expected_labels", [ ([15, 17, 19], [14, 16, 18, 20], ["A", "B", "A"], [0, 1, 0], ["A", "B"]), ([1, 3, 5], [0, 2, 4, 6, 8], [2, 0, 1, 2], [2, 0, 1], [0, 1, 2]), ], ) def test_cut_non_unique_labels(data, bins, labels, expected_codes, expected_labels): # GH 33141 result = cut(data, bins=bins, labels=labels, ordered=False) expected = Categorical.from_codes( expected_codes, categories=expected_labels, ordered=False ) tm.assert_categorical_equal(result, expected) @pytest.mark.parametrize( "data, bins, labels, expected_codes, expected_labels", [ ([15, 17, 19], [14, 16, 18, 20], ["C", "B", "A"], [0, 1, 2], ["C", "B", "A"]), ([1, 3, 5], [0, 2, 4, 6, 8], [3, 0, 1, 2], [0, 1, 2], [3, 0, 1, 2]), ], ) def test_cut_unordered_labels(data, bins, labels, expected_codes, expected_labels): # GH 33141 result = cut(data, bins=bins, labels=labels, ordered=False) expected = Categorical.from_codes( expected_codes, categories=expected_labels, ordered=False ) tm.assert_categorical_equal(result, expected) def test_cut_unordered_with_missing_labels_raises_error(): # GH 33141 msg = "'labels' must be provided if 'ordered = False'" with pytest.raises(ValueError, match=msg): cut([0.5, 3], bins=[0, 1, 2], ordered=False) def test_cut_unordered_with_series_labels(): # https://github.com/pandas-dev/pandas/issues/36603 s = Series([1, 2, 3, 4, 5]) bins = Series([0, 2, 4, 6]) labels = Series(["a", "b", "c"]) result = pd.cut(s, bins=bins, labels=labels, ordered=False) expected = Series(["a", "a", "b", "b", "c"], dtype="category") tm.assert_series_equal(result, expected) def test_cut_no_warnings(): df = DataFrame({"value": np.random.randint(0, 100, 20)}) labels = [f"{i} - {i + 9}" for i in range(0, 100, 10)] with tm.assert_produces_warning(False): df["group"] = pd.cut(df.value, range(0, 105, 10), right=False, labels=labels)
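As a quick orientation for the behavior this suite pins down, here is an illustrative sketch (not part of the tests) of cut's core contract:

import numpy as np
import pandas as pd

data = np.array([1, 7, 5, 4, 6, 3])

# Equal-width binning into 3 intervals; retbins=True also returns the edges.
cats, bins = pd.cut(data, bins=3, retbins=True)
print(bins)        # 4 monotonically increasing edges spanning the data range
print(cats.codes)  # per-element bin membership as integer codes

# labels=False returns the integer codes directly instead of a Categorical.
print(pd.cut(data, bins=3, labels=False))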
jreback/pandas
pandas/tests/reshape/test_cut.py
pandas/_config/localization.py
from typing import Optional, Type import pytest import pandas as pd import pandas._testing as tm from pandas.core import ops from .base import BaseExtensionTests class BaseOpsUtil(BaseExtensionTests): def get_op_from_name(self, op_name): return tm.get_op_from_name(op_name) def check_opname(self, s, op_name, other, exc=Exception): op = self.get_op_from_name(op_name) self._check_op(s, op, other, op_name, exc) def _check_op(self, s, op, other, op_name, exc=NotImplementedError): if exc is None: result = op(s, other) if isinstance(s, pd.DataFrame): if len(s.columns) != 1: raise NotImplementedError expected = s.iloc[:, 0].combine(other, op).to_frame() self.assert_frame_equal(result, expected) else: expected = s.combine(other, op) self.assert_series_equal(result, expected) else: with pytest.raises(exc): op(s, other) def _check_divmod_op(self, s, op, other, exc=Exception): # divmod has multiple return values, so check separately if exc is None: result_div, result_mod = op(s, other) if op is divmod: expected_div, expected_mod = s // other, s % other else: expected_div, expected_mod = other // s, other % s self.assert_series_equal(result_div, expected_div) self.assert_series_equal(result_mod, expected_mod) else: with pytest.raises(exc): divmod(s, other) class BaseArithmeticOpsTests(BaseOpsUtil): """ Various Series and DataFrame arithmetic ops methods. Subclasses supporting various ops should set the class variables to indicate that they support ops of that kind * series_scalar_exc = TypeError * frame_scalar_exc = TypeError * series_array_exc = TypeError * divmod_exc = TypeError """ series_scalar_exc: Optional[Type[TypeError]] = TypeError frame_scalar_exc: Optional[Type[TypeError]] = TypeError series_array_exc: Optional[Type[TypeError]] = TypeError divmod_exc: Optional[Type[TypeError]] = TypeError def test_arith_series_with_scalar(self, data, all_arithmetic_operators): # series & scalar op_name = all_arithmetic_operators s = pd.Series(data) self.check_opname(s, op_name, s.iloc[0], exc=self.series_scalar_exc) @pytest.mark.xfail(run=False, reason="_reduce needs implementation") def test_arith_frame_with_scalar(self, data, all_arithmetic_operators): # frame & scalar op_name = all_arithmetic_operators df = pd.DataFrame({"A": data}) self.check_opname(df, op_name, data[0], exc=self.frame_scalar_exc) def test_arith_series_with_array(self, data, all_arithmetic_operators): # ndarray & other series op_name = all_arithmetic_operators s = pd.Series(data) self.check_opname( s, op_name, pd.Series([s.iloc[0]] * len(s)), exc=self.series_array_exc ) def test_divmod(self, data): s = pd.Series(data) self._check_divmod_op(s, divmod, 1, exc=self.divmod_exc) self._check_divmod_op(1, ops.rdivmod, s, exc=self.divmod_exc) def test_divmod_series_array(self, data, data_for_twos): s = pd.Series(data) self._check_divmod_op(s, divmod, data) other = data_for_twos self._check_divmod_op(other, ops.rdivmod, s) other = pd.Series(other) self._check_divmod_op(other, ops.rdivmod, s) def test_add_series_with_extension_array(self, data): s = pd.Series(data) result = s + data expected = pd.Series(data + data) self.assert_series_equal(result, expected) def test_error(self, data, all_arithmetic_operators): # invalid ops op_name = all_arithmetic_operators with pytest.raises(AttributeError): getattr(data, op_name) @pytest.mark.parametrize("box", [pd.Series, pd.DataFrame]) def test_direct_arith_with_ndframe_returns_not_implemented(self, data, box): # EAs should return NotImplemented for ops with Series/DataFrame # Pandas takes care of unboxing 
the series and calling the EA's op. other = pd.Series(data) if box is pd.DataFrame: other = other.to_frame() if hasattr(data, "__add__"): result = data.__add__(other) assert result is NotImplemented else: raise pytest.skip(f"{type(data).__name__} does not implement add") class BaseComparisonOpsTests(BaseOpsUtil): """Various Series and DataFrame comparison ops methods.""" def _compare_other(self, s, data, op_name, other): op = self.get_op_from_name(op_name) if op_name == "__eq__": assert not op(s, other).all() elif op_name == "__ne__": assert op(s, other).all() else: # array assert getattr(data, op_name)(other) is NotImplemented # series s = pd.Series(data) with pytest.raises(TypeError): op(s, other) def test_compare_scalar(self, data, all_compare_operators): op_name = all_compare_operators s = pd.Series(data) self._compare_other(s, data, op_name, 0) def test_compare_array(self, data, all_compare_operators): op_name = all_compare_operators s = pd.Series(data) other = pd.Series([data[0]] * len(data)) self._compare_other(s, data, op_name, other) @pytest.mark.parametrize("box", [pd.Series, pd.DataFrame]) def test_direct_arith_with_ndframe_returns_not_implemented(self, data, box): # EAs should return NotImplemented for ops with Series/DataFrame # Pandas takes care of unboxing the series and calling the EA's op. other = pd.Series(data) if box is pd.DataFrame: other = other.to_frame() if hasattr(data, "__eq__"): result = data.__eq__(other) assert result is NotImplemented else: raise pytest.skip(f"{type(data).__name__} does not implement __eq__") if hasattr(data, "__ne__"): result = data.__ne__(other) assert result is NotImplemented else: raise pytest.skip(f"{type(data).__name__} does not implement __ne__") class BaseUnaryOpsTests(BaseOpsUtil): def test_invert(self, data): s = pd.Series(data, name="name") result = ~s expected = pd.Series(~data, name="name") self.assert_series_equal(result, expected)
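For context, a downstream extension-array test suite consumes these mixins roughly as sketched below; the MyEA name and the fixture body are hypothetical:

import pytest

from pandas.tests.extension import base


@pytest.fixture
def data():
    # Hypothetical: would construct a length-100 instance of the
    # ExtensionArray under test, as the extension test contract requires.
    raise NotImplementedError


class TestMyEAArithmeticOps(base.BaseArithmeticOpsTests):
    # None means "this op group should succeed"; an exception type means
    # the mixin asserts that the op raises it.
    series_scalar_exc = None
    frame_scalar_exc = None
    series_array_exc = None
    divmod_exc = TypeError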
import numpy as np import pytest import pandas as pd from pandas import ( Categorical, DataFrame, DatetimeIndex, Index, Interval, IntervalIndex, Series, TimedeltaIndex, Timestamp, cut, date_range, isna, qcut, timedelta_range, to_datetime, ) import pandas._testing as tm from pandas.api.types import CategoricalDtype as CDT import pandas.core.reshape.tile as tmod def test_simple(): data = np.ones(5, dtype="int64") result = cut(data, 4, labels=False) expected = np.array([1, 1, 1, 1, 1]) tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_bins(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1]) result, bins = cut(data, 3, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) intervals = intervals.take([0, 0, 0, 1, 2, 0]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667, 6.53333333, 9.7])) def test_right(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575]) result, bins = cut(data, 4, right=True, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) expected = Categorical(intervals, ordered=True) expected = expected.take([0, 0, 0, 2, 3, 0, 0]) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 2.575, 4.95, 7.325, 9.7])) def test_no_right(): data = np.array([0.2, 1.4, 2.5, 6.2, 9.7, 2.1, 2.575]) result, bins = cut(data, 4, right=False, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3), closed="left") intervals = intervals.take([0, 0, 0, 2, 3, 0, 1]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.2, 2.575, 4.95, 7.325, 9.7095])) def test_array_like(): data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] result, bins = cut(data, 3, retbins=True) intervals = IntervalIndex.from_breaks(bins.round(3)) intervals = intervals.take([0, 0, 0, 1, 2, 0]) expected = Categorical(intervals, ordered=True) tm.assert_categorical_equal(result, expected) tm.assert_almost_equal(bins, np.array([0.1905, 3.36666667, 6.53333333, 9.7])) def test_bins_from_interval_index(): c = cut(range(5), 3) expected = c result = cut(range(5), bins=expected.categories) tm.assert_categorical_equal(result, expected) expected = Categorical.from_codes( np.append(c.codes, -1), categories=c.categories, ordered=True ) result = cut(range(6), bins=expected.categories) tm.assert_categorical_equal(result, expected) def test_bins_from_interval_index_doc_example(): # Make sure we preserve the bins. 
ages = np.array([10, 15, 13, 12, 23, 25, 28, 59, 60]) c = cut(ages, bins=[0, 18, 35, 70]) expected = IntervalIndex.from_tuples([(0, 18), (18, 35), (35, 70)]) tm.assert_index_equal(c.categories, expected) result = cut([25, 20, 50], bins=c.categories) tm.assert_index_equal(result.categories, expected) tm.assert_numpy_array_equal(result.codes, np.array([1, 1, 2], dtype="int8")) def test_bins_not_overlapping_from_interval_index(): # see gh-23980 msg = "Overlapping IntervalIndex is not accepted" ii = IntervalIndex.from_tuples([(0, 10), (2, 12), (4, 14)]) with pytest.raises(ValueError, match=msg): cut([5, 6], bins=ii) def test_bins_not_monotonic(): msg = "bins must increase monotonically" data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] with pytest.raises(ValueError, match=msg): cut(data, [0.1, 1.5, 1, 10]) @pytest.mark.parametrize( "x, bins, expected", [ ( date_range("2017-12-31", periods=3), [Timestamp.min, Timestamp("2018-01-01"), Timestamp.max], IntervalIndex.from_tuples( [ (Timestamp.min, Timestamp("2018-01-01")), (Timestamp("2018-01-01"), Timestamp.max), ] ), ), ( [-1, 0, 1], np.array( [np.iinfo(np.int64).min, 0, np.iinfo(np.int64).max], dtype="int64" ), IntervalIndex.from_tuples( [(np.iinfo(np.int64).min, 0), (0, np.iinfo(np.int64).max)] ), ), ( [np.timedelta64(-1), np.timedelta64(0), np.timedelta64(1)], np.array( [ np.timedelta64(-np.iinfo(np.int64).max), np.timedelta64(0), np.timedelta64(np.iinfo(np.int64).max), ] ), IntervalIndex.from_tuples( [ (np.timedelta64(-np.iinfo(np.int64).max), np.timedelta64(0)), (np.timedelta64(0), np.timedelta64(np.iinfo(np.int64).max)), ] ), ), ], ) def test_bins_monotonic_not_overflowing(x, bins, expected): # GH 26045 result = cut(x, bins) tm.assert_index_equal(result.categories, expected) def test_wrong_num_labels(): msg = "Bin labels must be one fewer than the number of bin edges" data = [0.2, 1.4, 2.5, 6.2, 9.7, 2.1] with pytest.raises(ValueError, match=msg): cut(data, [0, 1, 10], labels=["foo", "bar", "baz"]) @pytest.mark.parametrize( "x,bins,msg", [ ([], 2, "Cannot cut empty array"), ([1, 2, 3], 0.5, "`bins` should be a positive integer"), ], ) def test_cut_corner(x, bins, msg): with pytest.raises(ValueError, match=msg): cut(x, bins) @pytest.mark.parametrize("arg", [2, np.eye(2), DataFrame(np.eye(2))]) @pytest.mark.parametrize("cut_func", [cut, qcut]) def test_cut_not_1d_arg(arg, cut_func): msg = "Input array must be 1 dimensional" with pytest.raises(ValueError, match=msg): cut_func(arg, 2) @pytest.mark.parametrize( "data", [ [0, 1, 2, 3, 4, np.inf], [-np.inf, 0, 1, 2, 3, 4], [-np.inf, 0, 1, 2, 3, 4, np.inf], ], ) def test_int_bins_with_inf(data): # GH 24314 msg = "cannot specify integer `bins` when input data contains infinity" with pytest.raises(ValueError, match=msg): cut(data, bins=3) def test_cut_out_of_range_more(): # see gh-1511 name = "x" ser = Series([0, -1, 0, 1, -3], name=name) ind = cut(ser, [0, 1], labels=False) exp = Series([np.nan, np.nan, np.nan, 0, np.nan], name=name) tm.assert_series_equal(ind, exp) @pytest.mark.parametrize( "right,breaks,closed", [ (True, [-1e-3, 0.25, 0.5, 0.75, 1], "right"), (False, [0, 0.25, 0.5, 0.75, 1 + 1e-3], "left"), ], ) def test_labels(right, breaks, closed): arr = np.tile(np.arange(0, 1.01, 0.1), 4) result, bins = cut(arr, 4, retbins=True, right=right) ex_levels = IntervalIndex.from_breaks(breaks, closed=closed) tm.assert_index_equal(result.categories, ex_levels) def test_cut_pass_series_name_to_factor(): name = "foo" ser = Series(np.random.randn(100), name=name) factor = cut(ser, 4) assert factor.name == name def 
test_label_precision(): arr = np.arange(0, 0.73, 0.01) result = cut(arr, 4, precision=2) ex_levels = IntervalIndex.from_breaks([-0.00072, 0.18, 0.36, 0.54, 0.72]) tm.assert_index_equal(result.categories, ex_levels) @pytest.mark.parametrize("labels", [None, False]) def test_na_handling(labels): arr = np.arange(0, 0.75, 0.01) arr[::3] = np.nan result = cut(arr, 4, labels=labels) result = np.asarray(result) expected = np.where(isna(arr), np.nan, result) tm.assert_almost_equal(result, expected) def test_inf_handling(): data = np.arange(6) data_ser = Series(data, dtype="int64") bins = [-np.inf, 2, 4, np.inf] result = cut(data, bins) result_ser = cut(data_ser, bins) ex_uniques = IntervalIndex.from_breaks(bins) tm.assert_index_equal(result.categories, ex_uniques) assert result[5] == Interval(4, np.inf) assert result[0] == Interval(-np.inf, 2) assert result_ser[5] == Interval(4, np.inf) assert result_ser[0] == Interval(-np.inf, 2) def test_cut_out_of_bounds(): arr = np.random.randn(100) result = cut(arr, [-1, 0, 1]) mask = isna(result) ex_mask = (arr < -1) | (arr > 1) tm.assert_numpy_array_equal(mask, ex_mask) @pytest.mark.parametrize( "get_labels,get_expected", [ ( lambda labels: labels, lambda labels: Categorical( ["Medium"] + 4 * ["Small"] + ["Medium", "Large"], categories=labels, ordered=True, ), ), ( lambda labels: Categorical.from_codes([0, 1, 2], labels), lambda labels: Categorical.from_codes([1] + 4 * [0] + [1, 2], labels), ), ], ) def test_cut_pass_labels(get_labels, get_expected): bins = [0, 25, 50, 100] arr = [50, 5, 10, 15, 20, 30, 70] labels = ["Small", "Medium", "Large"] result = cut(arr, bins, labels=get_labels(labels)) tm.assert_categorical_equal(result, get_expected(labels)) def test_cut_pass_labels_compat(): # see gh-16459 arr = [50, 5, 10, 15, 20, 30, 70] labels = ["Good", "Medium", "Bad"] result = cut(arr, 3, labels=labels) exp = cut(arr, 3, labels=Categorical(labels, categories=labels, ordered=True)) tm.assert_categorical_equal(result, exp) @pytest.mark.parametrize("x", [np.arange(11.0), np.arange(11.0) / 1e10]) def test_round_frac_just_works(x): # It works. 
cut(x, 2) @pytest.mark.parametrize( "val,precision,expected", [ (-117.9998, 3, -118), (117.9998, 3, 118), (117.9998, 2, 118), (0.000123456, 2, 0.00012), ], ) def test_round_frac(val, precision, expected): # see gh-1979 result = tmod._round_frac(val, precision=precision) assert result == expected def test_cut_return_intervals(): ser = Series([0, 1, 2, 3, 4, 5, 6, 7, 8]) result = cut(ser, 3) exp_bins = np.linspace(0, 8, num=4).round(3) exp_bins[0] -= 0.008 expected = Series( IntervalIndex.from_breaks(exp_bins, closed="right").take( [0, 0, 0, 1, 1, 1, 2, 2, 2] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) def test_series_ret_bins(): # see gh-8589 ser = Series(np.arange(4)) result, bins = cut(ser, 2, retbins=True) expected = Series( IntervalIndex.from_breaks([-0.003, 1.5, 3], closed="right").repeat(2) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "kwargs,msg", [ ({"duplicates": "drop"}, None), ({}, "Bin edges must be unique"), ({"duplicates": "raise"}, "Bin edges must be unique"), ({"duplicates": "foo"}, "invalid value for 'duplicates' parameter"), ], ) def test_cut_duplicates_bin(kwargs, msg): # see gh-20947 bins = [0, 2, 4, 6, 10, 10] values = Series(np.array([1, 3, 5, 7, 9]), index=["a", "b", "c", "d", "e"]) if msg is not None: with pytest.raises(ValueError, match=msg): cut(values, bins, **kwargs) else: result = cut(values, bins, **kwargs) expected = cut(values, pd.unique(bins)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize("data", [9.0, -9.0, 0.0]) @pytest.mark.parametrize("length", [1, 2]) def test_single_bin(data, length): # see gh-14652, gh-15428 ser = Series([data] * length) result = cut(ser, 1, labels=False) expected = Series([0] * length) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "array_1_writeable,array_2_writeable", [(True, True), (True, False), (False, False)] ) def test_cut_read_only(array_1_writeable, array_2_writeable): # issue 18773 array_1 = np.arange(0, 100, 10) array_1.flags.writeable = array_1_writeable array_2 = np.arange(0, 100, 10) array_2.flags.writeable = array_2_writeable hundred_elements = np.arange(100) tm.assert_categorical_equal( cut(hundred_elements, array_1), cut(hundred_elements, array_2) ) @pytest.mark.parametrize( "conv", [ lambda v: Timestamp(v), lambda v: to_datetime(v), lambda v: np.datetime64(v), lambda v: Timestamp(v).to_pydatetime(), ], ) def test_datetime_bin(conv): data = [np.datetime64("2012-12-13"), np.datetime64("2012-12-15")] bin_data = ["2012-12-12", "2012-12-14", "2012-12-16"] expected = Series( IntervalIndex( [ Interval(Timestamp(bin_data[0]), Timestamp(bin_data[1])), Interval(Timestamp(bin_data[1]), Timestamp(bin_data[2])), ] ) ).astype(CDT(ordered=True)) bins = [conv(v) for v in bin_data] result = Series(cut(data, bins=bins)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize( "data", [ to_datetime(Series(["2013-01-01", "2013-01-02", "2013-01-03"])), [ np.datetime64("2013-01-01"), np.datetime64("2013-01-02"), np.datetime64("2013-01-03"), ], np.array( [ np.datetime64("2013-01-01"), np.datetime64("2013-01-02"), np.datetime64("2013-01-03"), ] ), DatetimeIndex(["2013-01-01", "2013-01-02", "2013-01-03"]), ], ) def test_datetime_cut(data): # see gh-14714 # # Testing time data when it comes in various collection types. 
result, _ = cut(data, 3, retbins=True) expected = Series( IntervalIndex( [ Interval( Timestamp("2012-12-31 23:57:07.200000"), Timestamp("2013-01-01 16:00:00"), ), Interval( Timestamp("2013-01-01 16:00:00"), Timestamp("2013-01-02 08:00:00") ), Interval( Timestamp("2013-01-02 08:00:00"), Timestamp("2013-01-03 00:00:00") ), ] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(Series(result), expected) @pytest.mark.parametrize( "bins", [ 3, [ Timestamp("2013-01-01 04:57:07.200000"), Timestamp("2013-01-01 21:00:00"), Timestamp("2013-01-02 13:00:00"), Timestamp("2013-01-03 05:00:00"), ], ], ) @pytest.mark.parametrize("box", [list, np.array, Index, Series]) def test_datetime_tz_cut(bins, box): # see gh-19872 tz = "US/Eastern" s = Series(date_range("20130101", periods=3, tz=tz)) if not isinstance(bins, int): bins = box(bins) result = cut(s, bins) expected = Series( IntervalIndex( [ Interval( Timestamp("2012-12-31 23:57:07.200000", tz=tz), Timestamp("2013-01-01 16:00:00", tz=tz), ), Interval( Timestamp("2013-01-01 16:00:00", tz=tz), Timestamp("2013-01-02 08:00:00", tz=tz), ), Interval( Timestamp("2013-01-02 08:00:00", tz=tz), Timestamp("2013-01-03 00:00:00", tz=tz), ), ] ) ).astype(CDT(ordered=True)) tm.assert_series_equal(result, expected) def test_datetime_nan_error(): msg = "bins must be of datetime64 dtype" with pytest.raises(ValueError, match=msg): cut(date_range("20130101", periods=3), bins=[0, 2, 4]) def test_datetime_nan_mask(): result = cut( date_range("20130102", periods=5), bins=date_range("20130101", periods=2) ) mask = result.categories.isna() tm.assert_numpy_array_equal(mask, np.array([False])) mask = result.isna() tm.assert_numpy_array_equal(mask, np.array([False, True, True, True, True])) @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"]) def test_datetime_cut_roundtrip(tz): # see gh-19891 ser = Series(date_range("20180101", periods=3, tz=tz)) result, result_bins = cut(ser, 2, retbins=True) expected = cut(ser, result_bins) tm.assert_series_equal(result, expected) expected_bins = DatetimeIndex( ["2017-12-31 23:57:07.200000", "2018-01-02 00:00:00", "2018-01-03 00:00:00"] ) expected_bins = expected_bins.tz_localize(tz) tm.assert_index_equal(result_bins, expected_bins) def test_timedelta_cut_roundtrip(): # see gh-19891 ser = Series(timedelta_range("1day", periods=3)) result, result_bins = cut(ser, 2, retbins=True) expected = cut(ser, result_bins) tm.assert_series_equal(result, expected) expected_bins = TimedeltaIndex( ["0 days 23:57:07.200000", "2 days 00:00:00", "3 days 00:00:00"] ) tm.assert_index_equal(result_bins, expected_bins) @pytest.mark.parametrize("bins", [6, 7]) @pytest.mark.parametrize( "box, compare", [ (Series, tm.assert_series_equal), (np.array, tm.assert_categorical_equal), (list, tm.assert_equal), ], ) def test_cut_bool_coercion_to_int(bins, box, compare): # issue 20303 data_expected = box([0, 1, 1, 0, 1] * 10) data_result = box([False, True, True, False, True] * 10) expected = cut(data_expected, bins, duplicates="drop") result = cut(data_result, bins, duplicates="drop") compare(result, expected) @pytest.mark.parametrize("labels", ["foo", 1, True]) def test_cut_incorrect_labels(labels): # GH 13318 values = range(5) msg = "Bin labels must either be False, None or passed in as a list-like argument" with pytest.raises(ValueError, match=msg): cut(values, 4, labels=labels) @pytest.mark.parametrize("bins", [3, [0, 5, 15]]) @pytest.mark.parametrize("right", [True, False]) @pytest.mark.parametrize("include_lowest", [True, False]) def test_cut_nullable_integer(bins, 
right, include_lowest): a = np.random.randint(0, 10, size=50).astype(float) a[::2] = np.nan result = cut( pd.array(a, dtype="Int64"), bins, right=right, include_lowest=include_lowest ) expected = cut(a, bins, right=right, include_lowest=include_lowest) tm.assert_categorical_equal(result, expected) @pytest.mark.parametrize( "data, bins, labels, expected_codes, expected_labels", [ ([15, 17, 19], [14, 16, 18, 20], ["A", "B", "A"], [0, 1, 0], ["A", "B"]), ([1, 3, 5], [0, 2, 4, 6, 8], [2, 0, 1, 2], [2, 0, 1], [0, 1, 2]), ], ) def test_cut_non_unique_labels(data, bins, labels, expected_codes, expected_labels): # GH 33141 result = cut(data, bins=bins, labels=labels, ordered=False) expected = Categorical.from_codes( expected_codes, categories=expected_labels, ordered=False ) tm.assert_categorical_equal(result, expected) @pytest.mark.parametrize( "data, bins, labels, expected_codes, expected_labels", [ ([15, 17, 19], [14, 16, 18, 20], ["C", "B", "A"], [0, 1, 2], ["C", "B", "A"]), ([1, 3, 5], [0, 2, 4, 6, 8], [3, 0, 1, 2], [0, 1, 2], [3, 0, 1, 2]), ], ) def test_cut_unordered_labels(data, bins, labels, expected_codes, expected_labels): # GH 33141 result = cut(data, bins=bins, labels=labels, ordered=False) expected = Categorical.from_codes( expected_codes, categories=expected_labels, ordered=False ) tm.assert_categorical_equal(result, expected) def test_cut_unordered_with_missing_labels_raises_error(): # GH 33141 msg = "'labels' must be provided if 'ordered = False'" with pytest.raises(ValueError, match=msg): cut([0.5, 3], bins=[0, 1, 2], ordered=False) def test_cut_unordered_with_series_labels(): # https://github.com/pandas-dev/pandas/issues/36603 s = Series([1, 2, 3, 4, 5]) bins = Series([0, 2, 4, 6]) labels = Series(["a", "b", "c"]) result = pd.cut(s, bins=bins, labels=labels, ordered=False) expected = Series(["a", "a", "b", "b", "c"], dtype="category") tm.assert_series_equal(result, expected) def test_cut_no_warnings(): df = DataFrame({"value": np.random.randint(0, 100, 20)}) labels = [f"{i} - {i + 9}" for i in range(0, 100, 10)] with tm.assert_produces_warning(False): df["group"] = pd.cut(df.value, range(0, 105, 10), right=False, labels=labels)
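One behavior worth calling out from the GH 33141 tests above: with ordered=False, duplicate labels are permitted and collapse into a smaller category set. An illustrative sketch:

import pandas as pd

result = pd.cut([15, 17, 19], bins=[14, 16, 18, 20],
                labels=["A", "B", "A"], ordered=False)
print(list(result))       # ['A', 'B', 'A']
print(result.categories)  # Index(['A', 'B'], dtype='object') -- deduplicated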
jreback/pandas
pandas/tests/reshape/test_cut.py
pandas/tests/extension/base/ops.py
from __future__ import division, print_function, absolute_import

import os
import glob
import itertools
import tempfile
import shutil

from functools import partial
from collections import Iterator
from datetime import datetime, date

import pandas as pd

import toolz
from toolz.curried import get, map, memoize
from toolz import pipe, concat, curry

from pyspark import RDD, SQLContext, HiveContext
from pyspark.sql import SchemaRDD
from pyspark.rdd import PipelinedRDD

import datashape
from datashape import dshape, Record, DataShape, Option, Tuple
from datashape.predicates import isdimension, isrecord, iscollection

from .. import append, discover, convert
from ..core import ooc_types
from ..directory import Directory
from ..temp import Temp
from ..chunks import chunks
from .json import JSONLines, JSON
from .csv import CSV

from pyspark.sql import DataFrame as SparkDataFrame
from pyspark.sql.types import (ByteType, ShortType, IntegerType, LongType,
                               FloatType, DoubleType, StringType, BinaryType,
                               BooleanType, TimestampType, DateType,
                               ArrayType, StructType, StructField)

base = int, float, datetime, date, bool, str

_names = ('tmp%d' % i for i in itertools.count())


@append.register(SQLContext, object)
def iterable_to_sql_context(ctx, seq, **kwargs):
    return append(ctx, append(ctx._sc, seq, **kwargs), **kwargs)


def register_table(ctx, srdd, name=None):
    if name is None:
        name = next(_names)
    ctx.registerDataFrameAsTable(srdd, name)


@append.register(SQLContext, (JSONLines, Directory(JSONLines)))
def jsonlines_to_sparksql(ctx, json, dshape=None, name=None, schema=None,
                          samplingRatio=0.25, **kwargs):
    # if we're passing in schema, assume that we know what we're doing and
    # bypass any automated dshape inference
    if dshape is not None and schema is None:
        schema = dshape_to_schema(dshape.measure
                                  if isrecord(dshape.measure) else dshape)
    srdd = ctx.jsonFile(json.path, schema=schema, samplingRatio=samplingRatio)
    register_table(ctx, srdd, name=name)
    return srdd


@convert.register(list, (SparkDataFrame, SchemaRDD), cost=200.0)
def sparksql_dataframe_to_list(df, dshape=None, **kwargs):
    result = df.collect()
    if (dshape is not None and iscollection(dshape) and
            not isrecord(dshape.measure)):
        return list(map(get(0), result))
    return result


@convert.register(base, (SparkDataFrame, SchemaRDD), cost=200.0)
def spark_df_to_base(df, **kwargs):
    return df.collect()[0][0]


@append.register(SQLContext, RDD)
def rdd_to_sqlcontext(ctx, rdd, name=None, dshape=None, **kwargs):
    """ Convert a normal PySpark RDD to a SparkSQL RDD or Spark DataFrame

    The schema is inferred via ``dshape_to_schema``; it can also be
    specified explicitly through the ``dshape`` keyword argument.
""" # TODO: assumes that we don't have e.g., 10 * 10 * {x: int, y: int} if isdimension(dshape.parameters[0]): dshape = dshape.measure sql_schema = dshape_to_schema(dshape) sdf = ctx.applySchema(rdd, sql_schema) if name is None: name = next(_names) register_table(ctx, sdf, name=name) ctx.cacheTable(name) return sdf def scala_set_to_set(ctx, x): from py4j.java_gateway import java_import # import scala java_import(ctx._jvm, 'scala') # grab Scala's set converter and convert to a Python set return set(ctx._jvm.scala.collection.JavaConversions.setAsJavaSet(x)) @discover.register(SQLContext) def discover_sqlcontext(ctx): table_names = sorted(map(str, ctx.tableNames())) dshapes = zip(table_names, map(discover, map(ctx.table, table_names))) return datashape.DataShape(datashape.Record(dshapes)) @discover.register((SparkDataFrame, SchemaRDD)) def discover_spark_data_frame(df): schema = df.schema() if callable(df.schema) else df.schema return datashape.var * schema_to_dshape(schema) def chunk_file(filename, chunksize): """Stream `filename` in chunks of size `chunksize`. Parameters ---------- filename : str File to chunk chunksize : int Number of bytes to hold in memory at a single time """ with open(filename, mode='rb') as f: for chunk in iter(partial(f.read, chunksize), b''): yield chunk @append.register(JSONLines, (SparkDataFrame, SchemaRDD)) def spark_df_to_jsonlines(js, df, pattern='part-*', chunksize=1 << 23, # 8MB **kwargs): tmpd = tempfile.mkdtemp() try: try: df.save(tmpd, source='org.apache.spark.sql.json', mode='overwrite') except AttributeError: shutil.rmtree(tmpd) df.toJSON().saveAsTextFile(tmpd) except: raise else: files = glob.glob(os.path.join(tmpd, pattern)) with open(js.path, mode='ab') as f: pipe(files, map(curry(chunk_file, chunksize=chunksize)), concat, map(f.write), toolz.count) finally: shutil.rmtree(tmpd) return js @convert.register((SparkDataFrame, SchemaRDD), (RDD, PipelinedRDD)) def rdd_to_spark_df_or_srdd(rdd, **kwargs): return append(HiveContext(rdd.context), rdd, **kwargs) try: from .hdfs import HDFS except ImportError: pass else: @append.register(HDFS(JSONLines), (Iterator, object, SparkDataFrame, SchemaRDD)) @append.register(HDFS(JSON), (list, object)) @append.register(HDFS(CSV), (chunks(pd.DataFrame), pd.DataFrame, object)) def append_spark_to_hdfs(target, source, **kwargs): tmp = convert(Temp(target.subtype), source, **kwargs) return append(target, tmp, **kwargs) def dshape_to_schema(ds): """Convert datashape to SparkSQL type system. 
    Examples
    --------
    >>> print(dshape_to_schema('int32'))  # doctest: +SKIP
    IntegerType

    >>> print(dshape_to_schema('5 * int32'))  # doctest: +SKIP
    ArrayType(IntegerType,false)

    >>> print(dshape_to_schema('5 * ?int32'))  # doctest: +SKIP
    ArrayType(IntegerType,true)

    >>> print(dshape_to_schema('{name: string, amount: int32}'))  # doctest: +SKIP
    StructType(List(StructField(name,StringType,false),StructField(amount,IntegerType,false)))

    >>> print(dshape_to_schema('10 * {name: string, amount: ?int32}'))  # doctest: +SKIP
    ArrayType(StructType(List(StructField(name,StringType,false),StructField(amount,IntegerType,true))),false)
    """
    if isinstance(ds, str):
        return dshape_to_schema(dshape(ds))
    if isinstance(ds, Tuple):
        raise TypeError('Please provide a Record dshape for these column '
                        'types: %s' % (ds.dshapes,))
    if isinstance(ds, Record):
        return StructType([
            StructField(name,
                        dshape_to_schema(deoption(typ)),
                        isinstance(typ, datashape.Option))
            for name, typ in ds.fields])
    if isinstance(ds, DataShape):
        if isdimension(ds[0]):
            elem = ds.subshape[0]
            if isinstance(elem, DataShape) and len(elem) == 1:
                elem = elem[0]
            return ArrayType(dshape_to_schema(deoption(elem)),
                             isinstance(elem, Option))
        else:
            return dshape_to_schema(ds[0])
    if ds in dshape_to_sparksql:
        return dshape_to_sparksql[ds]
    raise NotImplementedError()


def schema_to_dshape(schema):
    if type(schema) in sparksql_to_dshape:
        return sparksql_to_dshape[type(schema)]
    if isinstance(schema, ArrayType):
        dshape = schema_to_dshape(schema.elementType)
        return datashape.var * (Option(dshape)
                                if schema.containsNull else dshape)
    if isinstance(schema, StructType):
        fields = [(field.name, Option(schema_to_dshape(field.dataType))
                   if field.nullable else schema_to_dshape(field.dataType))
                  for field in schema.fields]
        return datashape.dshape(Record(fields))
    raise NotImplementedError('SparkSQL type not known %r' %
                              type(schema).__name__)


def deoption(ds):
    """

    >>> deoption('int32')
    ctype("int32")

    >>> deoption('?int32')
    ctype("int32")
    """
    if isinstance(ds, str):
        ds = dshape(ds)
    if isinstance(ds, DataShape) and not isdimension(ds[0]):
        return deoption(ds[0])
    if isinstance(ds, Option):
        return ds.ty
    else:
        return ds


# see http://spark.apache.org/docs/latest/sql-programming-guide.html#spark-sql-datatype-reference
sparksql_to_dshape = {
    ByteType: datashape.int8,
    ShortType: datashape.int16,
    IntegerType: datashape.int32,
    LongType: datashape.int64,
    FloatType: datashape.float32,
    DoubleType: datashape.float64,
    StringType: datashape.string,
    BinaryType: datashape.bytes_,
    BooleanType: datashape.bool_,
    TimestampType: datashape.datetime_,
    DateType: datashape.date_,
    # sql.ArrayType: ?,
    # sql.MapType: ?,
    # sql.StructType: ?
}

dshape_to_sparksql = {
    datashape.int16: ShortType(),
    datashape.int32: IntegerType(),
    datashape.int64: LongType(),
    datashape.float32: FloatType(),
    datashape.float64: DoubleType(),
    datashape.real: DoubleType(),
    datashape.time_: TimestampType(),
    datashape.date_: DateType(),
    datashape.datetime_: TimestampType(),
    datashape.bool_: BooleanType(),
    datashape.string: StringType()
}

ooc_types |= set([SparkDataFrame, SchemaRDD])

SQLContext = memoize(SQLContext)
HiveContext = memoize(HiveContext)
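A small illustrative round trip through the type converters above (assumes pyspark and datashape are importable; printed forms are approximate):

from odo.backends.sparksql import deoption, dshape_to_schema, schema_to_dshape

# Record dshape -> StructType: "name" is non-nullable, "amount" is nullable
# because of the Option (?) marker.
schema = dshape_to_schema('{name: string, amount: ?int32}')

# StructType -> Record dshape, with nullability mapped back onto Option.
print(schema_to_dshape(schema))

print(deoption('?int32'))  # ctype("int32")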
from __future__ import absolute_import, division, print_function import pytest sas7bdat = pytest.importorskip('sas7bdat') pytest.importorskip('odo.backends.sas') import os import pandas as pd from collections import Iterator from sas7bdat import SAS7BDAT from odo.backends.sas import discover, sas_to_iterator from odo.utils import tmpfile, into_path from odo import append, convert, resource, dshape test_path = into_path('backends', 'tests', 'airline.sas7bdat') sasfile = SAS7BDAT(test_path) columns = ("DATE", "AIR", "mon1", "mon2", "mon3", "mon4", "mon5", "mon6", "mon7", "mon8", "mon9", "mon10", "mon11", "mon12", "t", "Lair") ds = dshape('''var * {DATE: date, AIR: float64, mon1: float64, mon2: float64, mon3: float64, mon4: float64, mon5: float64, mon6: float64, mon7: float64, mon8: float64, mon9: float64, mon10: float64, mon11: float64, mon12: float64, t: float64, Lair: float64}''') def test_resource_sas7bdat(): assert isinstance(resource(test_path), SAS7BDAT) def test_discover_sas(): assert discover(sasfile) == ds def test_convert_sas_to_dataframe(): df = convert(pd.DataFrame, sasfile) assert isinstance(df, pd.DataFrame) # pandas doesn't support date expected = str(ds.measure).replace('date', 'datetime') assert str(discover(df).measure).replace('?', '') == expected def test_convert_sas_to_list(): out = convert(list, sasfile) assert isinstance(out, list) assert not any(isinstance(item, str) for item in out[0]) # No header assert all(isinstance(ln, list) for ln in out) def test_convert_sas_to_iterator(): itr = sas_to_iterator(sasfile) assert isinstance(itr, Iterator) def test_append_sas_to_sqlite_round_trip(): expected = convert(set, sasfile) with tmpfile('db') as fn: r = resource('sqlite:///%s::SAS' % fn, dshape=discover(sasfile)) append(r, sasfile) result = convert(set, r) assert expected == result
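The same conversions via odo's top-level API, for orientation (the file path and the sqlite URI are assumptions):

import pandas as pd
from odo import odo

# SAS file -> in-memory DataFrame
df = odo('airline.sas7bdat', pd.DataFrame)

# SAS file -> a table named SAS inside a SQLite database
odo('airline.sas7bdat', 'sqlite:///airline.db::SAS')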
quantopian/odo
odo/backends/tests/test_sas.py
odo/backends/sparksql.py
import os import pytest from cfme.fixtures.terminalreporter import reporter from cfme.utils.datafile import data_path_for_filename from cfme.utils.datafile import load_data_file from cfme.utils.path import data_path from cfme.utils.path import log_path # Collection for storing unique combinations of data file paths # and filenames for usage reporting after a completed test run seen_data_files = set() @pytest.fixture(scope="module") def datafile(request): """datafile(filename, replacements) datafile fixture, with templating support Args: filename: filename to load from the data dir replacements: template replacements Returns: Path to the loaded datafile Usage: Given a filename, it will attempt to open the given file from the test's corresponding data dir. For example, this: datafile('testfile') # in tests/subdir/test_module_name.py Would return a file object representing this file: /path/to/cfme_tests/data/subdir/test_module_name/testfile Given a filename with a leading slash, it will attempt to load the file relative to the root of the data dir. For example, this: datafile('/common/testfile') # in tests/subdir/test_module_name.py Would return a file object representing this file: /path/to/cfme_tests/data/common/testfile Note that the test module name is not used with the leading slash. .. rubric:: Templates: This fixture can also handle template replacements. If the datafile being loaded is a python template, the dictionary of replacements can be passed as the 'replacements' keyword argument. In this case, the returned data file will be a NamedTemporaryFile prepopulated with the interpolated result from combining the template with the replacements mapping. * http://docs.python.org/2/library/string.html#template-strings * http://docs.python.org/2/library/tempfile.html#tempfile.NamedTemporaryFile """ return _FixtureDataFile(request) def pytest_addoption(parser): group = parser.getgroup('cfme') group.addoption('--udf-report', action='store_true', default=False, dest='udf_report', help='flag to generate an unused data files report') def pytest_sessionfinish(session, exitstatus): udf_log_file = log_path.join('unused_data_files.log') if udf_log_file.check(): # Clean up old udf log if it exists udf_log_file.remove() if session.config.option.udf_report is False: # Short out here if not making a report return # Output an unused data files log after a test run data_files = set() for dirpath, dirnames, filenames in os.walk(str(data_path)): for filename in filenames: filepath = os.path.join(dirpath, filename) data_files.add(filepath) unused_data_files = data_files - seen_data_files if unused_data_files: # Write the log of unused data files out, minus the data dir prefix udf_log = ''.join( (line[len(str(data_path)):] + '\n' for line in unused_data_files) ) udf_log_file.write(udf_log + '\n') # Throw a notice into the terminal reporter to check the log tr = reporter() tr.write_line('') tr.write_sep( '-', '%d unused data files after test run, check %s' % ( len(unused_data_files), udf_log_file.basename ) ) class _FixtureDataFile(object): def __init__(self, request): self.base_path = str(request.session.fspath) self.testmod_path = str(request.fspath) def __call__(self, filename, replacements=None): if filename.startswith('/'): complete_path = data_path_for_filename( filename.strip('/'), self.base_path) else: complete_path = data_path_for_filename( filename, self.base_path, self.testmod_path) seen_data_files.add(complete_path) return load_data_file(complete_path, replacements)
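An illustrative consumer of the fixture above (the module path, file names and replacement keys are assumptions):

# e.g. in cfme/tests/subdir/test_module_name.py


def test_plain_datafile(datafile):
    # Resolves to data/subdir/test_module_name/testfile
    contents = datafile('testfile').read()
    assert contents


def test_templated_datafile(datafile):
    # Returns a NamedTemporaryFile with ${owner} interpolated from replacements.
    rendered = datafile('report.template', replacements={'owner': 'qe'})
    rendered.seek(0)
    assert 'qe' in rendered.read()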
import operator
from collections import namedtuple

import fauxfactory
import pytest

from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.containers.provider import ContainersProvider
from cfme.infrastructure.config_management import ConfigManager
from cfme.infrastructure.config_management import ConfigSystem
from cfme.infrastructure.provider import InfraProvider
from cfme.markers.env_markers.provider import ONE_PER_CATEGORY
from cfme.physical.provider import PhysicalProvider
from cfme.services.myservice import MyService
from cfme.services.workloads import TemplatesImages
from cfme.services.workloads import VmsInstances
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ

SearchParam = namedtuple("SearchParam",
                         ["collection", "destination", "entity", "filter", "my_filters"])

pytestmark = [
    pytest.mark.uncollectif(
        lambda param, appliance:
            (param.collection in [ConfigManager, 'ansible_tower_providers'] or
             param.filter == 'Job Template (Ansible Tower) : Name') or
            (appliance.version >= '5.11' and param.entity == 'network_load_balancers'),
        reason='load balancers are no longer supported in 5.11 -> BZ 1672949'),
    pytest.mark.meta(automates=[BZ(1402392)])  # should be only on test_filter_crud
]


def _navigation(param, appliance):
    if isinstance(param.collection, str):
        view = navigate_to(getattr(appliance.collections, param.collection), param.destination)
    else:
        view = navigate_to(param.collection, param.destination)
    return view


def _filter_displayed(filters, filter):
    if filters.is_displayed:
        assert filter, "Filter wasn't created!"
    else:
        pytest.fail("Filter wasn't created or filters tree is not displayed!")


def _select_filter(filters, filter_name, param):
    if param.my_filters:
        if isinstance(param.my_filters, tuple):
            filters.tree.click_path(param.my_filters[1], "My Filters", filter_name)
        else:
            filters.tree.click_path("My Filters", filter_name)
    else:
        filters.navigation.select(filter_name)


def _can_open_advanced_search(param, appliance):
    """
    Polarion:
        assignee: anikifor
        casecomponent: WebUI
        caseimportance: critical
        initialEstimate: 1/10h
    """
    view = _navigation(param, appliance)
    assert view.search.is_advanced_search_possible, (
        f"Advanced search not displayed for {param.entity} "
        f"on {param.destination.lower()}")
    view.search.open_advanced_search()
    assert view.search.is_advanced_search_opened, (
        f"Advanced search failed to open for {param.entity} "
        f"on {param.destination.lower()}")
    view.search.close_advanced_search()
    assert not view.search.is_advanced_search_opened, (
        f"Advanced search failed to close for {param.entity} "
        f"on {param.destination.lower()}")


def _filter_crud(param, appliance):
    """
    Polarion:
        assignee: anikifor
        casecomponent: WebUI
        caseimportance: high
        initialEstimate: 1/10h
    """
    filter_name = fauxfactory.gen_string('alphanumeric', 10)
    filter_value = fauxfactory.gen_string('alphanumeric', 10)
    filter_value_updated = fauxfactory.gen_string('alphanumeric', 10)
    view = _navigation(param, appliance)
    # create
    if ':' not in param.filter:
        # to test "Count of" field, values don't contain ':'
        filter_value = fauxfactory.gen_numeric_string(3)
        filter_value_updated = fauxfactory.gen_numeric_string(3)
        view.search.save_filter(
            "fill_count({}, =, {})".format(param.filter, filter_value), filter_name)
    else:
        view.search.save_filter(
            "fill_field({}, =, {})".format(param.filter, filter_value), filter_name)
    view.search.close_advanced_search()
    view.flash.assert_no_error()
    # read
    if param.my_filters:
        if isinstance(param.my_filters, tuple):
            filters = operator.attrgetter(param.my_filters[0])(view)
            _filter_displayed(filters, filters.tree.has_path(param.my_filters[1],
                                                             "My Filters", filter_name))
        else:
            filters = operator.attrgetter(param.my_filters)(view)
            _filter_displayed(filters, filters.tree.has_path("My Filters", filter_name))
    else:
        filters = view.my_filters
        _filter_displayed(filters, filters.navigation.has_item(filter_name))
    # update
    _select_filter(filters, filter_name, param)
    view.search.open_advanced_search()
    view.search.advanced_search_form.search_exp_editor.select_first_expression()
    if ':' not in param.filter:
        # to test "Count of" field
        view.search.advanced_search_form.search_exp_editor.fill_count(
            count=param.filter, key='=', value=filter_value_updated)
    else:
        view.search.advanced_search_form.search_exp_editor.fill_field(
            field=param.filter, key='=', value=filter_value_updated)
    # save expression
    view.search.advanced_search_form.save_filter_button.click()
    # save filter
    view.search.advanced_search_form.save_filter_button.click()
    view.search.close_advanced_search()
    _select_filter(filters, filter_name, param)
    # read after update
    view.search.open_advanced_search()
    exp_text = view.search.advanced_search_form.search_exp_editor.expression_text
    assert filter_value_updated in exp_text, "Filter wasn't changed!"
    # delete
    view.search.delete_filter()
    view.search.close_advanced_search()
    if param.my_filters:
        if filters.is_displayed:
            if isinstance(param.my_filters, tuple):
                assert not filters.tree.has_path(
                    param.my_filters[1], "My Filters", filter_name), "Filter wasn't deleted!"
            else:
                assert not filters.tree.has_path(
                    "My Filters", filter_name), "Filter wasn't deleted!"
    else:
        if view.my_filters.is_displayed:
            assert not view.my_filters.navigation.has_item(
                filter_name), "Filter wasn't deleted!"


_tests = [_can_open_advanced_search, _filter_crud]


def methodized(metafunc):
    """Transform function to method by adding self argument

    works just for specific functions in this file, would be nice to generalize
    TODO generalize for more tests with possibly different arguments
    """
    def func(self, param, appliance):
        return metafunc(param, appliance)
    func.__doc__ = metafunc.__doc__
    return func


def inject_tests(metaclass):
    """Attach tests to decorated class

    uses _tests - list of test functions
    """
    for test in _tests:
        method = methodized(test)
        setattr(metaclass, f"test{test.__name__}", method)
    return metaclass


def base_pytestmarks(param_values, setup_prov=False):
    return [
        test_requirements.filtering,
        pytest.mark.parametrize(
            'param', param_values,
            ids=['{}-{}'.format(param.entity, param.destination.lower())
                 for param in param_values],
            scope="class"
        )] + ([pytest.mark.usefixtures("setup_provider")] if setup_prov else [])


@inject_tests
@pytest.mark.provider([CloudProvider], selector=ONE_PER_CATEGORY)
class TestCloud(object):
    params_values = [
        SearchParam('cloud_providers', 'All', 'cloudprovider', 'Cloud Provider : Name', None),
        SearchParam('cloud_av_zones', 'All', 'availabilityzone', 'Availability Zone : Name',
                    None),
        SearchParam('cloud_host_aggregates', 'All', 'hostaggregate', 'Host Aggregate : Name',
                    None),
        SearchParam('cloud_tenants', 'All', 'tenant', 'Cloud Tenant : Name', None),
        SearchParam('cloud_flavors', 'All', 'flavor', 'Flavor : Name', None),
        SearchParam('cloud_instances', 'All', 'instances', 'Instance : Name',
                    ('sidebar.instances', "All Instances")),
        SearchParam('cloud_images', 'All', 'images', 'Image : Name',
                    ('sidebar.images', "All Images")),
        SearchParam('cloud_stacks', 'All', 'orchestration_stacks',
                    'Orchestration Stack : Name', None),
        SearchParam('cloud_keypairs', 'All', 'key_pairs', 'Key Pair : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([CloudProvider], selector=ONE_PER_CATEGORY)
class TestNetwork(object):
    params_values = [
        SearchParam('network_providers', 'All', 'network_managers',
                    'Network Manager : Name', None),
        SearchParam('cloud_networks', 'All', 'network_networks', 'Cloud Network : Name', None),
        SearchParam('network_subnets', 'All', 'network_subnets', 'Cloud Subnet : Name', None),
        SearchParam('network_routers', 'All', 'network_routers', 'Network Router : Name', None),
        SearchParam('network_security_groups', 'All', 'network_security_groups',
                    'Security Group : Name', None),
        SearchParam('network_floating_ips', 'All', 'network_floating_ips',
                    'Floating IP : Address', None),
        SearchParam('network_ports', 'All', 'network_ports', 'Network Port : Name', None),
        SearchParam('balancers', 'All', 'network_load_balancers', 'Load Balancer : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([InfraProvider], selector=ONE_PER_CATEGORY)
class TestInfra(object):
    params_values = [
        SearchParam('infra_providers', 'All', 'infraproviders',
                    'Infrastructure Provider : Name', None),
        SearchParam('clusters', 'All', 'clusters', 'Cluster / Deployment Role : Name', None),
        SearchParam('hosts', 'All', 'hosts', 'Host / Node : Name', None),
        SearchParam('hosts', 'All', 'hosts', 'Host / Node.VMs', None),
        SearchParam('infra_vms', 'VMsOnly', 'vms', 'Virtual Machine : Name',
                    ('sidebar.vms', "All VMs")),
        SearchParam('infra_templates', 'TemplatesOnly', 'templates', 'Template : Name',
                    ('sidebar.templates', "All Templates")),
        SearchParam('resource_pools', 'All', 'resource_pools', 'Resource Pool : Name', None),
        SearchParam('datastores', 'All', 'datastores', 'Datastore : Name',
                    ('sidebar.datastores', "All Datastores")),
        SearchParam(VmsInstances, 'All', 'workloads_vms', 'VM and Instance : Name',
                    ('vms', "All VMs & Instances")),
        SearchParam(TemplatesImages, 'All', 'workloads_templates',
                    'VM Template and Image : Name', ('templates', "All Templates & Images")),
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([PhysicalProvider], selector=ONE_PER_CATEGORY)
class TestPhysical(object):
    params_values = [
        SearchParam('physical_providers', 'All', 'physical_providers',
                    'Physical Infrastructure Provider : Name', None),
        SearchParam('physical_servers', 'All', 'physical_servers',
                    'Physical Server : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([ContainersProvider], selector=ONE_PER_CATEGORY)
class TestContainers(object):
    params_values = [
        SearchParam('containers_providers', 'All', 'container_providers',
                    'Containers Provider : Name', None),
        SearchParam('container_projects', 'All', 'container_projects',
                    'Container Project : Name', None),
        SearchParam('container_routes', 'All', 'container_routes',
                    'Container Route : Name', None),
        SearchParam('container_services', 'All', 'container_services',
                    'Container Service : Name', None),
        SearchParam('container_replicators', 'All', 'container_replicators',
                    'Container Replicator : Name', None),
        SearchParam('container_pods', 'All', 'container_pods', 'Container Pod : Name', None),
        SearchParam('containers', 'All', 'containers', 'Container : Name', None),
        SearchParam('container_nodes', 'All', 'container_nodes',
                    'Container Node : Name', None),
        SearchParam('container_volumes', 'All', 'container_volumes',
                    'Persistent Volume : Name', None),
        SearchParam('container_builds', 'All', 'container_builds',
                    'Container Build : Name', None),
        SearchParam('container_image_registries', 'All', 'image_registries',
                    'Container Image Registry : Name', None),
        SearchParam('container_images', 'All', 'container_images',
                    'Container Image : Name', None),
        SearchParam('container_templates', 'All', 'container_templates',
                    'Container Template : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
class TestAnsibleTower(object):
    params_values = [
        SearchParam('ansible_tower_providers', 'All', 'ansible_tower_explorer_provider',
                    'Automation Manager (Ansible Tower) : Name',
                    ('sidebar.providers', 'All Ansible Tower Providers')),
        SearchParam('ansible_tower_systems', 'All', 'ansible_tower_explorer_system',
                    'Configured System (Ansible Tower) : Hostname',
                    ('sidebar.configured_systems', 'All Ansible Tower Configured Systems')),
        SearchParam('ansible_tower_job_templates', 'All',
                    'ansible_tower_explorer_job_templates',
                    'Job Template (Ansible Tower) : Name',
                    ('sidebar.job_templates', 'All Ansible Tower Job Templates')),
        SearchParam('ansible_tower_jobs', 'All', 'ansible_tower_jobs',
                    'Ansible Tower Job : Name', None)]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
class TestStorage(object):
    params_values = [
        SearchParam('volumes', 'All', 'block_store_volumes', 'Cloud Volume : Name', None),
        SearchParam('volume_snapshots', 'All', 'block_store_snapshots',
                    'Cloud Volume Snapshot : Name', None),
        SearchParam('volume_backups', 'All', 'block_store_backups',
                    'Cloud Volume Backup : Name', None),
        SearchParam('object_store_containers', 'All', 'object_store_containers',
                    'Cloud Object Store Container : Name', None),
        SearchParam('object_store_objects', 'All', 'object_store_objects',
                    'Cloud Object Store Object : Name', None),
    ]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
class TestConfigManagement(object):
    params_values = [
        SearchParam(ConfigManager, 'All', 'configuration_management',
                    'Configuration Manager : Name',
                    ('sidebar.providers', "All Configuration Management Providers")),
        SearchParam(ConfigSystem, 'All', 'configuration_management_systems',
                    'Configured System (Red Hat Satellite) : Hostname',
                    ('sidebar.configured_systems', "All Configured Systems")),
    ]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
@pytest.mark.meta(blockers=[BZ(1733489)])
class TestServices(object):
    params_values = [SearchParam(MyService, 'All', 'myservices', 'Service : Name', 'myservice')]
    pytestmark = base_pytestmarks(params_values)
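Because @inject_tests builds the test methods at import time, the collected test names never appear literally in the file. As a sketch (the class name here is illustrative, not part of the module), the decorator's effect on TestCloud is equivalent to hand-writing the following, since f"test{test.__name__}" turns _can_open_advanced_search into test_can_open_advanced_search and base_pytestmarks' parametrize fans each method out over params_values with ids like 'cloudprovider-all':

# Sketch only: the equivalent of what @inject_tests attaches dynamically.
class TestCloudEquivalent(object):
    params_values = TestCloud.params_values
    pytestmark = base_pytestmarks(params_values, True)

    def test_can_open_advanced_search(self, param, appliance):
        return _can_open_advanced_search(param, appliance)

    def test_filter_crud(self, param, appliance):
        return _filter_crud(param, appliance)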
izapolsk/integration_tests
cfme/tests/webui/test_advanced_search.py
cfme/fixtures/datafile.py
import attr
from riggerlib import recursive_update

from cfme.cloud.instance import Instance
from cfme.cloud.instance import InstanceCollection


@attr.s
class GCEInstance(Instance):
    # CFME & provider power control options
    START = "Start"
    POWER_ON = START  # For compatibility with the infra objects.
    STOP = "Stop"
    DELETE = "Delete"
    TERMINATE = 'Delete'
    # CFME-only power control options
    SOFT_REBOOT = "Soft Reboot"
    # Provider-only power control options
    RESTART = "Restart"

    # CFME power states
    STATE_ON = "on"
    STATE_OFF = "off"
    STATE_SUSPENDED = "suspended"
    STATE_TERMINATED = "terminated"
    STATE_ARCHIVED = "archived"
    STATE_UNKNOWN = "unknown"

    @property
    def ui_powerstates_available(self):
        return {
            'on': [self.STOP, self.SOFT_REBOOT, self.TERMINATE],
            'off': [self.START, self.TERMINATE]}

    @property
    def ui_powerstates_unavailable(self):
        return {
            'on': [self.START],
            'off': [self.STOP, self.SOFT_REBOOT]}

    @property
    def vm_default_args(self):
        """Represents dictionary used for Vm/Instance provision with GCE mandatory default args"""
        inst_args = super(GCEInstance, self).vm_default_args
        provisioning = self.provider.data['provisioning']
        inst_args['properties']['boot_disk_size'] = provisioning.get('boot_disk_size', '10 GB')
        return inst_args

    @property
    def vm_default_args_rest(self):
        inst_args = super(GCEInstance, self).vm_default_args_rest
        provisioning = self.provider.data['provisioning']
        recursive_update(inst_args, {
            'vm_fields': {
                'boot_disk_size': provisioning['boot_disk_size'].replace(' ', '.')}})
        return inst_args


@attr.s
class GCEInstanceCollection(InstanceCollection):
    ENTITY = GCEInstance
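A small sketch of the nested merge that vm_default_args_rest relies on. The payload keys and values below are hypothetical; only riggerlib.recursive_update and the '10 GB' -> '10.GB' rewrite come from the code above:

from riggerlib import recursive_update

# Hypothetical REST provision payload; recursive_update mutates it in place,
# overlaying only the nested 'boot_disk_size' key and keeping everything else.
inst_args = {'vm_fields': {'vm_name': 'gce-sketch-vm'}, 'template_fields': {}}
recursive_update(inst_args, {
    'vm_fields': {'boot_disk_size': '10 GB'.replace(' ', '.')}})  # -> '10.GB'
assert inst_args == {
    'vm_fields': {'vm_name': 'gce-sketch-vm', 'boot_disk_size': '10.GB'},
    'template_fields': {},
}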
izapolsk/integration_tests
cfme/tests/webui/test_advanced_search.py
cfme/cloud/instance/gce.py
"""Module handling report menus contents""" from contextlib import contextmanager import attr from navmazing import NavigateToAttribute from widgetastic.widget import Text from widgetastic_patternfly import Button from cfme.intelligence.reports import CloudIntelReportsView from cfme.intelligence.reports import ReportsMultiBoxSelect from cfme.modeling.base import BaseCollection from cfme.modeling.base import BaseEntity from cfme.utils.appliance.implementations.ui import CFMENavigateStep from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.appliance.implementations.ui import navigator from widgetastic_manageiq import FolderManager from widgetastic_manageiq import ManageIQTree class AllReportMenusView(CloudIntelReportsView): title = Text("#explorer_title_text") reports_tree = ManageIQTree("menu_roles_treebox") @property def is_displayed(self): return ( self.in_intel_reports and self.title.text == "All EVM Groups" and self.edit_report_menus.is_opened and self.edit_report_menus.tree.currently_selected == ["All EVM Groups"] ) class EditReportMenusView(AllReportMenusView): # Buttons save_button = Button("Save") reset_button = Button("Reset") default_button = Button("Default") cancel_button = Button("Cancel") commit_button = Button("Commit") discard_button = Button("Discard") manager = FolderManager(".//div[@id='folder_lists']/table") report_select = ReportsMultiBoxSelect( move_into="Move selected reports right", move_from="Move selected reports left", available_items="available_reports", chosen_items="selected_reports" ) @property def is_displayed(self): return ( self.in_intel_reports and self.title.text == 'Editing EVM Group "{}"'.format(self.context["object"].group) and self.edit_report_menus.is_opened and self.edit_report_menus.tree.currently_selected == [ "All EVM Groups", self.context["object"].group ] ) @attr.s class ReportMenu(BaseEntity): """ This is a fake class mainly needed for navmazing navigation. """ group = None def go_to_group(self, group_name): self.group = group_name view = navigate_to(self, "Edit") assert view.is_displayed return view def get_folders(self, group): """Returns list of folders for given user group. Args: group: User group to check. """ view = self.go_to_group(group) view.reports_tree.click_path("Top Level") fields = view.manager.fields view.discard_button.click() return fields def get_subfolders(self, group, folder): """Returns list of sub-folders for given user group and folder. Args: group: User group to check. folder: Folder to read. """ view = self.go_to_group(group) view.reports_tree.click_path("Top Level", folder) fields = view.manager.fields view.discard_button.click() return fields def _action(self, action, manager, folder_name): with manager as folder_manager: getattr(folder_manager, action)(folder_name) def add_folder(self, group, folder): """Adds a folder under top-level. Args: group: User group. folder: Name of the new folder. """ self._action("add", self.manage_folder(group), folder) def add_subfolder(self, group, folder, subfolder): """Adds a subfolder under specified folder. Args: group: User group. folder: Name of the folder. subfolder: Name of the new subfolder. """ self._action("add", self.manage_folder(group, folder), subfolder) def remove_folder(self, group, folder): """Removes a folder under top-level. Args: group: User group. folder: Name of the folder. """ self._action("delete", self.manage_folder(group), folder) def remove_subfolder(self, group, folder, subfolder): """Removes a subfolder under specified folder. 
Args: group: User group. folder: Name of the folder. subfolder: Name of the subfolder. """ self._action("delete", self.manage_folder(group, folder), subfolder) def reset_to_default(self, group): """Clicks the `Default` button. Args: group: Group to set to Default """ view = self.go_to_group(group) view.default_button.click() view.save_button.click() flash_view = self.create_view(AllReportMenusView) assert flash_view.flash.assert_message( 'Report Menu for role "{}" was saved'.format(group) ) @contextmanager def manage_subfolder(self, group, folder, subfolder): """Context manager to use when modifying the subfolder contents. You can use manager's :py:meth:`FolderManager.bail_out` classmethod to end and discard the changes done inside the with block. Args: group: User group. folder: Parent folder name. subfolder: Subfolder name to manage. Returns: Context-managed :py:class: `widgetastic_manageiq.MultiBoxSelect` instance """ view = self.go_to_group(group) view.reports_tree.click_path("Top Level", folder, subfolder) try: yield view.report_select except FolderManager._BailOut: view.discard_button.click() except Exception: # In case of any exception, nothing will be saved view.discard_button.click() raise # And reraise the exception else: # If no exception happens, save! view.commit_button.click() view.save_button.click() flash_view = self.create_view(AllReportMenusView) flash_view.flash.assert_message( 'Report Menu for role "{}" was saved'.format(group) ) @contextmanager def manage_folder(self, group, folder=None): """Context manager to use when modifying the folder contents. You can use manager's :py:meth:`FolderManager.bail_out` classmethod to end and discard the changes done inside the with block. This context manager does not give the manager as a value to the with block so you have to import and use the :py:class:`FolderManager` class manually. Args: group: User group. folder: Which folder to manage. If None, top-level will be managed. Returns: Context-managed :py:class:`widgetastic_manageiq.FolderManager` instance """ view = self.go_to_group(group) if folder is None: view.reports_tree.click_path("Top Level") else: view.reports_tree.click_path("Top Level", folder) try: yield view.manager except FolderManager._BailOut: view.manager.discard() except Exception: # In case of any exception, nothing will be saved view.manager.discard() raise # And reraise the exception else: # If no exception happens, save! view.manager.commit() view.save_button.click() flash_view = self.create_view(AllReportMenusView) flash_view.flash.assert_message( 'Report Menu for role "{}" was saved'.format(group) ) def move_reports(self, group, folder, subfolder, *reports): """ Moves a list of reports to a given menu Args: group: User group folder: Parent of the subfolder where reports are to be moved. subfolder: Subfolder under which the reports are to be moved. reports: List of reports that are to be moved. """ reports = list(reports) cancel_view = "" with self.manage_subfolder(group, folder, subfolder) as selected_menu: selected_options = selected_menu.parent_view.report_select.all_options diff = set(selected_options) & set(reports) if diff and (len(diff) == len(reports)): cancel_view = self.create_view(AllReportMenusView) # If all the reports to be moved are already present, raise an exception to exit. raise FolderManager._BailOut # fill method replaces all the options in all_options with the value passed as argument # We do not want to replace any value, we just want to move the new reports to a given # menu. 
This is a work-around for that purpose. reports.extend(selected_options) selected_menu.parent_view.report_select.fill(reports) if cancel_view: cancel_view.flash.assert_message( 'Edit of Report Menu for role "{}" was cancelled by the user'.format( group ) ) @attr.s class ReportMenusCollection(BaseCollection): """Collection object for the :py:class:'cfme.intelligence.reports.ReportMenu'.""" ENTITY = ReportMenu @navigator.register(ReportMenu, "Edit") class EditReportMenus(CFMENavigateStep): VIEW = EditReportMenusView prerequisite = NavigateToAttribute( "appliance.collections.intel_report_menus", "All" ) def step(self, *args, **kwargs): self.prerequisite_view.edit_report_menus.tree.click_path( "All EVM Groups", self.obj.group ) @navigator.register(ReportMenusCollection, "All") class ReportMenus(CFMENavigateStep): VIEW = AllReportMenusView prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports") def step(self, *args, **kwargs): self.prerequisite_view.edit_report_menus.tree.click_path("All EVM Groups")
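A usage sketch for the entity above. The intel_report_menus attribute name is taken from the navigator prerequisite; the group name and the instantiate() call on the collection are assumptions:

# Assumed: an `appliance` fixture; the EVM group name is hypothetical.
report_menu = appliance.collections.intel_report_menus.instantiate()

# The convenience wrappers drive manage_folder() internally:
report_menu.add_folder('EvmGroup-administrator', 'Custom Reports')
report_menu.add_subfolder('EvmGroup-administrator', 'Custom Reports', 'Nightly')

# Or use the context manager directly; calling FolderManager.bail_out()
# inside the block discards the pending changes instead of saving them.
with report_menu.manage_folder('EvmGroup-administrator') as manager:
    manager.delete('Custom Reports')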
izapolsk/integration_tests
cfme/tests/webui/test_advanced_search.py
cfme/intelligence/reports/menus.py
import attr
import importscan
import sentaku

from cfme.generic_objects.definition.button_groups import GenericObjectButtonGroupsCollection
from cfme.generic_objects.definition.button_groups import GenericObjectButtonsCollection
from cfme.generic_objects.instance import GenericObjectInstanceCollection
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.update import Updateable


@attr.s
class GenericObjectDefinition(BaseEntity, Updateable, sentaku.modeling.ElementMixin):
    """Generic Objects Definition class to context switch between UI and REST.

    Read/Update/Delete functionality.
    """

    _collections = {
        'generic_objects': GenericObjectInstanceCollection,
        'generic_object_groups_buttons': GenericObjectButtonGroupsCollection,
        'generic_object_buttons': GenericObjectButtonsCollection
    }

    update = sentaku.ContextualMethod()
    delete = sentaku.ContextualMethod()
    exists = sentaku.ContextualProperty()
    add_button = sentaku.ContextualMethod()
    add_button_group = sentaku.ContextualMethod()
    generic_objects = sentaku.ContextualProperty()
    generic_object_buttons = sentaku.ContextualProperty()
    instance_count = sentaku.ContextualProperty()

    name = attr.ib()
    description = attr.ib()
    attributes = attr.ib(default=None)  # e.g. {'address': 'string'}
    associations = attr.ib(default=None)  # e.g. {'services': 'Service'}
    methods = attr.ib(default=None)  # e.g. ['method1', 'method2']
    custom_image_file_path = attr.ib(default=None)
    rest_response = attr.ib(default=None, init=False)


@attr.s
class GenericObjectDefinitionCollection(BaseCollection, sentaku.modeling.ElementMixin):

    ENTITY = GenericObjectDefinition

    create = sentaku.ContextualMethod()
    all = sentaku.ContextualMethod()


from cfme.generic_objects.definition import rest, ui  # NOQA last for import cycles

importscan.scan(rest)
importscan.scan(ui)
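A context-switching sketch for the definition class above. The ViaREST/ViaUI import location and the generic_object_definitions collection name are assumptions, not confirmed by this module; only the contextual create/delete methods and the attributes/associations/methods shapes come from the code:

# Assumed import path for the context markers and an `appliance` fixture.
from cfme.utils.appliance import ViaREST, ViaUI

with appliance.context.use(ViaREST):
    # Dispatches to the implementation registered in the sibling `rest` module.
    definition = appliance.collections.generic_object_definitions.create(
        name='SketchDefinition',
        description='illustrative only',
        attributes={'address': 'string'},
        associations={'services': 'Service'},
        methods=['method1', 'method2'],
    )

with appliance.context.use(ViaUI):
    definition.delete()  # same entity, UI implementation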
import operator from collections import namedtuple import fauxfactory import pytest from cfme import test_requirements from cfme.cloud.provider import CloudProvider from cfme.containers.provider import ContainersProvider from cfme.infrastructure.config_management import ConfigManager from cfme.infrastructure.config_management import ConfigSystem from cfme.infrastructure.provider import InfraProvider from cfme.markers.env_markers.provider import ONE_PER_CATEGORY from cfme.physical.provider import PhysicalProvider from cfme.services.myservice import MyService from cfme.services.workloads import TemplatesImages from cfme.services.workloads import VmsInstances from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.blockers import BZ SearchParam = namedtuple("SearchParam", ["collection", "destination", "entity", "filter", "my_filters"]) pytestmark = [ pytest.mark.uncollectif(lambda param, appliance: (param.collection in [ConfigManager, 'ansible_tower_providers'] or param.filter == 'Job Template (Ansible Tower) : Name') or (appliance.version >= '5.11' and param.entity == 'network_load_balancers'), reason='load balancers are no longer supported in 5.11 -> BZ 1672949'), pytest.mark.meta(automates=[BZ(1402392)]) # should be only on test_filter_crud ] def _navigation(param, appliance): if isinstance(param.collection, str): view = navigate_to(getattr(appliance.collections, param.collection), param.destination) else: view = navigate_to(param.collection, param.destination) return view def _filter_displayed(filters, filter): if filters.is_displayed: assert filter, "Filter wasn't created!" else: pytest.fail("Filter wasn't created or filters tree is not displayed!") def _select_filter(filters, filter_name, param): if param.my_filters: if isinstance(param.my_filters, tuple): filters.tree.click_path(param.my_filters[1], "My Filters", filter_name) else: filters.tree.click_path("My Filters", filter_name) else: filters.navigation.select(filter_name) def _can_open_advanced_search(param, appliance): """ Polarion: assignee: anikifor casecomponent: WebUI caseimportance: critical initialEstimate: 1/10h """ view = _navigation(param, appliance) assert view.search.is_advanced_search_possible, (f"Advanced search not displayed " f"for {param.entity} " f"on {param.destination.lower()}") view.search.open_advanced_search() assert view.search.is_advanced_search_opened, (f"Advanced search failed to open " f"for {param.entity} " f"on {param.destination.lower()}") view.search.close_advanced_search() assert not view.search.is_advanced_search_opened, (f"Advanced search failed to close " f"for {param.entity} " f"on {param.destination.lower()}") def _filter_crud(param, appliance): """ Polarion: assignee: anikifor casecomponent: WebUI caseimportance: high initialEstimate: 1/10h """ filter_name = fauxfactory.gen_string('alphanumeric', 10) filter_value = fauxfactory.gen_string('alphanumeric', 10) filter_value_updated = fauxfactory.gen_string('alphanumeric', 10) view = _navigation(param, appliance) # create if ':' not in param.filter: # to test "Count of" field, values don't contain ':' filter_value = fauxfactory.gen_numeric_string(3) filter_value_updated = fauxfactory.gen_numeric_string(3) view.search.save_filter( "fill_count({}, =, {})".format(param.filter, filter_value), filter_name) else: view.search.save_filter( "fill_field({}, =, {})".format(param.filter, filter_value), filter_name) view.search.close_advanced_search() view.flash.assert_no_error() # read if param.my_filters: if 
isinstance(param.my_filters, tuple): filters = operator.attrgetter(param.my_filters[0])(view) _filter_displayed(filters, filters.tree.has_path(param.my_filters[1], "My Filters", filter_name)) else: filters = operator.attrgetter(param.my_filters)(view) _filter_displayed(filters, filters.tree.has_path("My Filters", filter_name)) else: filters = view.my_filters _filter_displayed(filters, filters.navigation.has_item(filter_name)) # update _select_filter(filters, filter_name, param) view.search.open_advanced_search() view.search.advanced_search_form.search_exp_editor.select_first_expression() if ':' not in param.filter: # to test "Count of" field view.search.advanced_search_form.search_exp_editor.fill_count(count=param.filter, key='=', value=filter_value_updated) else: view.search.advanced_search_form.search_exp_editor.fill_field(field=param.filter, key='=', value=filter_value_updated) # save expression view.search.advanced_search_form.save_filter_button.click() # save filter view.search.advanced_search_form.save_filter_button.click() view.search.close_advanced_search() _select_filter(filters, filter_name, param) # read after update view.search.open_advanced_search() exp_text = view.search.advanced_search_form.search_exp_editor.expression_text assert filter_value_updated in exp_text, "Filter wasn't changed!" # delete view.search.delete_filter() view.search.close_advanced_search() if param.my_filters: if filters.is_displayed: if isinstance(param.my_filters, tuple): assert not filters.tree.has_path(param.my_filters[1], "My Filters", filter_name), "Filter wasn't deleted!" else: assert not filters.tree.has_path("My Filters", filter_name), "Filter wasn't deleted!" else: if view.my_filters.is_displayed: assert not view.my_filters.navigation.has_item(filter_name), "Filter wasn't deleted!" 
_tests = [_can_open_advanced_search, _filter_crud]


def methodized(metafunc):
    """Transform function to method by adding self argument

    works just for specific functions in this file, would be nice to generalize
    TODO generalize for more tests with possibly different arguments
    """
    def func(self, param, appliance):
        return metafunc(param, appliance)
    func.__doc__ = metafunc.__doc__
    return func


def inject_tests(metaclass):
    """Attach tests to decorated class

    uses _tests - list of test functions
    """
    for test in _tests:
        method = methodized(test)
        setattr(metaclass, f"test{test.__name__}", method)
    return metaclass


def base_pytestmarks(param_values, setup_prov=False):
    return [
        test_requirements.filtering,
        pytest.mark.parametrize(
            'param', param_values,
            ids=['{}-{}'.format(param.entity, param.destination.lower())
                 for param in param_values],
            scope="class"
        )] + ([pytest.mark.usefixtures("setup_provider")] if setup_prov else [])


@inject_tests
@pytest.mark.provider([CloudProvider], selector=ONE_PER_CATEGORY)
class TestCloud(object):
    params_values = [
        SearchParam('cloud_providers', 'All', 'cloudprovider', 'Cloud Provider : Name', None),
        SearchParam('cloud_av_zones', 'All', 'availabilityzone', 'Availability Zone : Name', None),
        SearchParam('cloud_host_aggregates', 'All', 'hostaggregate',
                    'Host Aggregate : Name', None),
        SearchParam('cloud_tenants', 'All', 'tenant', 'Cloud Tenant : Name', None),
        SearchParam('cloud_flavors', 'All', 'flavor', 'Flavor : Name', None),
        SearchParam('cloud_instances', 'All', 'instances', 'Instance : Name',
                    ('sidebar.instances', "All Instances")),
        SearchParam('cloud_images', 'All', 'images', 'Image : Name',
                    ('sidebar.images', "All Images")),
        SearchParam('cloud_stacks', 'All', 'orchestration_stacks',
                    'Orchestration Stack : Name', None),
        SearchParam('cloud_keypairs', 'All', 'key_pairs', 'Key Pair : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([CloudProvider], selector=ONE_PER_CATEGORY)
class TestNetwork(object):
    params_values = [
        SearchParam('network_providers', 'All', 'network_managers',
                    'Network Manager : Name', None),
        SearchParam('cloud_networks', 'All', 'network_networks', 'Cloud Network : Name', None),
        SearchParam('network_subnets', 'All', 'network_subnets', 'Cloud Subnet : Name', None),
        SearchParam('network_routers', 'All', 'network_routers', 'Network Router : Name', None),
        SearchParam('network_security_groups', 'All', 'network_security_groups',
                    'Security Group : Name', None),
        SearchParam('network_floating_ips', 'All', 'network_floating_ips',
                    'Floating IP : Address', None),
        SearchParam('network_ports', 'All', 'network_ports', 'Network Port : Name', None),
        SearchParam('balancers', 'All', 'network_load_balancers', 'Load Balancer : Name', None)]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([InfraProvider], selector=ONE_PER_CATEGORY)
class TestInfra(object):
    params_values = [
        SearchParam('infra_providers', 'All', 'infraproviders',
                    'Infrastructure Provider : Name', None),
        SearchParam('clusters', 'All', 'clusters', 'Cluster / Deployment Role : Name', None),
        SearchParam('hosts', 'All', 'hosts', 'Host / Node : Name', None),
        SearchParam('hosts', 'All', 'hosts', 'Host / Node.VMs', None),
        SearchParam('infra_vms', 'VMsOnly', 'vms', 'Virtual Machine : Name',
                    ('sidebar.vms', "All VMs")),
        SearchParam('infra_templates', 'TemplatesOnly', 'templates', 'Template : Name',
                    ('sidebar.templates', "All Templates")),
        SearchParam('resource_pools', 'All', 'resource_pools', 'Resource Pool : Name', None),
        SearchParam('datastores', 'All', 'datastores', 'Datastore : Name',
                    ('sidebar.datastores', "All Datastores")),
        SearchParam(VmsInstances, 'All', 'workloads_vms', 'VM and Instance : Name',
                    ('vms', "All VMs & Instances")),
        SearchParam(TemplatesImages, 'All', 'workloads_templates',
                    'VM Template and Image : Name', ('templates', "All Templates & Images")),
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([PhysicalProvider], selector=ONE_PER_CATEGORY)
class TestPhysical(object):
    params_values = [
        SearchParam('physical_providers', 'All', 'physical_providers',
                    'Physical Infrastructure Provider : Name', None),
        SearchParam('physical_servers', 'All', 'physical_servers',
                    'Physical Server : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([ContainersProvider], selector=ONE_PER_CATEGORY)
class TestContainers(object):
    params_values = [
        SearchParam('containers_providers', 'All', 'container_providers',
                    'Containers Provider : Name', None),
        SearchParam('container_projects', 'All', 'container_projects',
                    'Container Project : Name', None),
        SearchParam('container_routes', 'All', 'container_routes',
                    'Container Route : Name', None),
        SearchParam('container_services', 'All', 'container_services',
                    'Container Service : Name', None),
        SearchParam('container_replicators', 'All', 'container_replicators',
                    'Container Replicator : Name', None),
        SearchParam('container_pods', 'All', 'container_pods', 'Container Pod : Name', None),
        SearchParam('containers', 'All', 'containers', 'Container : Name', None),
        SearchParam('container_nodes', 'All', 'container_nodes', 'Container Node : Name', None),
        SearchParam('container_volumes', 'All', 'container_volumes',
                    'Persistent Volume : Name', None),
        SearchParam('container_builds', 'All', 'container_builds',
                    'Container Build : Name', None),
        SearchParam('container_image_registries', 'All', 'image_registries',
                    'Container Image Registry : Name', None),
        SearchParam('container_images', 'All', 'container_images',
                    'Container Image : Name', None),
        SearchParam('container_templates', 'All', 'container_templates',
                    'Container Template : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
class TestAnsibleTower(object):
    params_values = [
        SearchParam('ansible_tower_providers', 'All', 'ansible_tower_explorer_provider',
                    'Automation Manager (Ansible Tower) : Name',
                    ('sidebar.providers', 'All Ansible Tower Providers')),
        SearchParam('ansible_tower_systems', 'All', 'ansible_tower_explorer_system',
                    'Configured System (Ansible Tower) : Hostname',
                    ('sidebar.configured_systems', 'All Ansible Tower Configured Systems')),
        SearchParam('ansible_tower_job_templates', 'All', 'ansible_tower_explorer_job_templates',
                    'Job Template (Ansible Tower) : Name',
                    ('sidebar.job_templates', 'All Ansible Tower Job Templates')),
        SearchParam('ansible_tower_jobs', 'All', 'ansible_tower_jobs',
                    'Ansible Tower Job : Name', None)]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
class TestStorage(object):
    params_values = [
        SearchParam('volumes', 'All', 'block_store_volumes', 'Cloud Volume : Name', None),
        SearchParam('volume_snapshots', 'All', 'block_store_snapshots',
                    'Cloud Volume Snapshot : Name', None),
        SearchParam('volume_backups', 'All', 'block_store_backups',
                    'Cloud Volume Backup : Name', None),
        SearchParam('object_store_containers', 'All', 'object_store_containers',
                    'Cloud Object Store Container : Name', None),
        SearchParam('object_store_objects', 'All', 'object_store_objects',
                    'Cloud Object Store Object : Name', None),
    ]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
class TestConfigManagement(object):
    params_values = [
        SearchParam(ConfigManager, 'All', 'configuration_management',
                    'Configuration Manager : Name',
                    ('sidebar.providers', "All Configuration Management Providers")),
        SearchParam(ConfigSystem, 'All', 'configuration_management_systems',
                    'Configured System (Red Hat Satellite) : Hostname',
                    ('sidebar.configured_systems', "All Configured Systems")),
    ]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
@pytest.mark.meta(blockers=[BZ(1733489)])
class TestServices(object):
    params_values = [SearchParam(MyService, 'All', 'myservices', 'Service : Name', 'myservice')]
    pytestmark = base_pytestmarks(params_values)
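The methodized/inject_tests pair above is the core mechanism of this module: plain functions are wrapped into methods and attached to each class so that one implementation is collected under every class's own pytestmark. A minimal, dependency-free sketch of the same pattern (all _demo_* names are made up for illustration, not part of the repo):

# Sketch of the inject_tests pattern; runnable on its own.
_demo_tests = []


def _demo_check(param, appliance):
    """Docstring carried over to the generated method."""
    return param, appliance


_demo_tests.append(_demo_check)


def demo_inject(cls):
    for test in _demo_tests:
        def method(self, param, appliance, _test=test):  # default arg pins the loop variable
            return _test(param, appliance)
        method.__doc__ = test.__doc__
        setattr(cls, f"test{test.__name__}", method)  # _demo_check -> test_demo_check
    return cls


@demo_inject
class DemoSuite(object):
    pass


assert hasattr(DemoSuite, "test_demo_check")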
izapolsk/integration_tests
cfme/tests/webui/test_advanced_search.py
cfme/generic_objects/definition/__init__.py
from os import path
from urllib.error import URLError

import attr
from cached_property import cached_property
from wrapanapi.systems.container import Openshift

from cfme.common import Taggable
from cfme.common.provider import DefaultEndpoint
from cfme.common.vm_console import ConsoleMixin
from cfme.containers.provider import ContainersProvider
from cfme.containers.provider import ContainersProviderDefaultEndpoint
from cfme.containers.provider import ContainersProviderEndpointsForm
from cfme.control.explorer.alert_profiles import NodeAlertProfile
from cfme.control.explorer.alert_profiles import ProviderAlertProfile
from cfme.utils import ssh
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.log import logger
from cfme.utils.ocp_cli import OcpCli
from cfme.utils.varmeth import variable
from cfme.utils.wait import TimedOutError
from cfme.utils.wait import wait_for


class CustomAttribute(object):
    def __init__(self, name, value, field_type=None, href=None):
        self.name = name
        self.value = value
        self.field_type = field_type
        self.href = href


class OpenshiftDefaultEndpoint(ContainersProviderDefaultEndpoint):
    """Represents Openshift default endpoint"""

    @staticmethod
    def get_ca_cert(connection_info):
        """Get OpenShift's certificate from the master machine.

        Args:
            connection_info (dict): username, password and hostname for OCP
        Returns:
            certificate's content.
        """
        with ssh.SSHClient(**connection_info) as provider_ssh:
            _, stdout, _ = provider_ssh.exec_command("cat /etc/origin/master/ca.crt")
            return str("".join(stdout.readlines()))


class ServiceBasedEndpoint(DefaultEndpoint):

    @property
    def view_value_mapping(self):
        out = {'hostname': self.hostname,
               'api_port': self.api_port,
               'sec_protocol': self.sec_protocol}
        if out['sec_protocol'] and self.sec_protocol.lower() == 'ssl trusting custom ca':
            out['trusted_ca_certificates'] = OpenshiftDefaultEndpoint.get_ca_cert(
                {"username": self.ssh_creds.principal,
                 "password": self.ssh_creds.secret,
                 "hostname": self.master_hostname})
        return out


class VirtualizationEndpoint(ServiceBasedEndpoint):
    """Represents virtualization Endpoint"""
    name = 'virtualization'

    @property
    def view_value_mapping(self):
        # values like host and port are taken from the Default endpoint and are
        # not editable in the Virtualization endpoint; only a token can be added
        return {'kubevirt_token': self.token}


class MetricsEndpoint(ServiceBasedEndpoint):
    """Represents metrics Endpoint"""
    name = 'metrics'


class AlertsEndpoint(ServiceBasedEndpoint):
    """Represents Alerts Endpoint"""
    name = 'alerts'


@attr.s(cmp=False)
class OpenshiftProvider(ContainersProvider, ConsoleMixin, Taggable):
    num_route = ['num_route']
    STATS_TO_MATCH = ContainersProvider.STATS_TO_MATCH + num_route
    type_name = "openshift"
    mgmt_class = Openshift
    db_types = ["Openshift::ContainerManager"]
    endpoints_form = ContainersProviderEndpointsForm
    settings_key = 'ems_openshift'
    ems_pretty_name = 'OpenShift Container Platform'

    http_proxy = attr.ib(default=None)
    adv_http = attr.ib(default=None)
    adv_https = attr.ib(default=None)
    no_proxy = attr.ib(default=None)
    image_repo = attr.ib(default=None)
    image_reg = attr.ib(default=None)
    image_tag = attr.ib(default=None)
    cve_loc = attr.ib(default=None)
    virt_type = attr.ib(default=None)
    provider = attr.ib(default=None)

    def create(self, **kwargs):
        # Enable alerts collection before adding the provider to avoid missing an
        # active alert after adding the provider.
        # For more info: https://bugzilla.redhat.com/show_bug.cgi?id=1514950
        if getattr(self, "alerts_type") == "Prometheus":
            alert_profiles = self.appliance.collections.alert_profiles
            provider_profile = alert_profiles.instantiate(ProviderAlertProfile,
                                                          "Prometheus Provider Profile")
            node_profile = alert_profiles.instantiate(NodeAlertProfile,
                                                      "Prometheus node Profile")
            for profile in [provider_profile, node_profile]:
                profile.assign_to("The Enterprise")
        super(OpenshiftProvider, self).create(**kwargs)

    @cached_property
    def cli(self):
        return OcpCli(self)

    def href(self):
        return self.appliance.rest_api.collections.providers\
            .find_by(name=self.name).resources[0].href

    @property
    def view_value_mapping(self):
        mapping = {'name': self.name,
                   'zone': self.zone,
                   'prov_type': ('OpenShift Container Platform'
                                 if self.appliance.is_downstream else 'OpenShift')}
        mapping['metrics_type'] = self.metrics_type
        mapping['alerts_type'] = self.alerts_type
        mapping['proxy'] = {'http_proxy': self.http_proxy}
        mapping['advanced'] = {
            'adv_http': self.adv_http,
            'adv_https': self.adv_https,
            'no_proxy': self.no_proxy,
            'image_repo': self.image_repo,
            'image_reg': self.image_reg,
            'image_tag': self.image_tag,
            'cve_loc': self.cve_loc
        }
        mapping['virt_type'] = self.virt_type
        return mapping

    @property
    def is_provider_enabled(self):
        return self.appliance.rest_api.collections.providers.get(name=self.name).enabled

    @variable(alias='db')
    def num_route(self):
        return self._num_db_generic('container_routes')

    @num_route.variant('ui')
    def num_route_ui(self):
        view = navigate_to(self, "Details")
        return int(view.entities.summary("Relationships").get_text_of('Container Routes'))

    @variable(alias='db')
    def num_template(self):
        return self._num_db_generic('container_templates')

    @num_template.variant('ui')
    def num_template_ui(self):
        view = navigate_to(self, "Details")
        return int(view.entities.summary("Relationships").get_text_of("Container Templates"))

    @classmethod
    def from_config(cls, prov_config, prov_key, appliance=None):
        appliance = appliance or cls.appliance
        endpoints = {}
        token_creds = cls.process_credential_yaml_key(prov_config['credentials'],
                                                      cred_type='token')
        master_hostname = prov_config['endpoints']['default'].hostname
        ssh_creds = cls.process_credential_yaml_key(prov_config['ssh_creds'])
        for endp in prov_config['endpoints']:
            # Add ssh_creds to each endpoint so that get_ca_cert
            # is able to fetch the SSL cert from OCP for each endpoint
            setattr(prov_config['endpoints'][endp], "master_hostname", master_hostname)
            setattr(prov_config['endpoints'][endp], "ssh_creds", ssh_creds)
            if OpenshiftDefaultEndpoint.name == endp:
                prov_config['endpoints'][endp]['token'] = token_creds.token
                endpoints[endp] = OpenshiftDefaultEndpoint(**prov_config['endpoints'][endp])
            elif MetricsEndpoint.name == endp:
                endpoints[endp] = MetricsEndpoint(**prov_config['endpoints'][endp])
            elif AlertsEndpoint.name == endp:
                endpoints[endp] = AlertsEndpoint(**prov_config['endpoints'][endp])
            else:
                raise Exception('Unsupported endpoint type "{}".'.format(endp))

        settings = prov_config.get('settings', {})
        advanced = settings.get('advanced', {})
        http_proxy = settings.get('proxy', {}).get('http_proxy')
        adv_http, adv_https, no_proxy, image_repo, image_reg, image_tag, cve_loc = [
            advanced.get(field) for field in
            ('adv_http', 'adv_https', 'no_proxy', 'image_repo', 'image_reg',
             'image_tag', 'cve_loc')
        ]
        return appliance.collections.containers_providers.instantiate(
            prov_class=cls,
            name=prov_config.get('name'),
            key=prov_key,
            zone=prov_config.get('server_zone'),
            metrics_type=prov_config.get('metrics_type'),
            alerts_type=prov_config.get('alerts_type'),
            endpoints=endpoints,
            provider_data=prov_config,
            http_proxy=http_proxy,
            adv_http=adv_http,
            adv_https=adv_https,
            no_proxy=no_proxy,
            image_repo=image_repo,
            image_reg=image_reg,
            image_tag=image_tag,
            cve_loc=cve_loc,
            virt_type=prov_config.get('virt_type'))

    def custom_attributes(self):
        """Returns custom attributes."""
        response = self.appliance.rest_api.get(
            path.join(self.href(), 'custom_attributes'))
        out = []
        for attr_dict in response['resources']:
            attr = self.appliance.rest_api.get(attr_dict['href'])
            out.append(
                CustomAttribute(
                    attr['name'],
                    attr['value'],
                    (attr['field_type'] if 'field_type' in attr else None),
                    attr_dict['href']
                )
            )
        return out

    def add_custom_attributes(self, *custom_attributes):
        """Add static custom attributes to the provider.

        Args:
            custom_attributes: The custom attributes to add.
        Returns:
            response.
        """
        if not custom_attributes:
            raise TypeError('{} takes at least 1 argument.'
                            .format(self.add_custom_attributes.__name__))
        for c_attr in custom_attributes:
            if not isinstance(c_attr, CustomAttribute):
                raise TypeError('All arguments should be of type {}. ({} != {})'
                                .format(CustomAttribute, type(c_attr), CustomAttribute))
        payload = {
            "action": "add",
            "resources": [{
                "name": ca.name,
                "value": str(ca.value)
            } for ca in custom_attributes]}
        for i, fld_tp in enumerate([c_attr.field_type for c_attr in custom_attributes]):
            if fld_tp:
                payload['resources'][i]['field_type'] = fld_tp
        return self.appliance.rest_api.post(
            path.join(self.href(), 'custom_attributes'), **payload)

    def edit_custom_attributes(self, *custom_attributes):
        """Edit static custom attributes of the provider.

        Args:
            custom_attributes: The custom attributes to edit.
        Returns:
            response.
        """
        if not custom_attributes:
            raise TypeError('{} takes at least 1 argument.'
                            .format(self.edit_custom_attributes.__name__))
        for c_attr in custom_attributes:
            if not isinstance(c_attr, CustomAttribute):
                raise TypeError('All arguments should be of type {}. ({} != {})'
                                .format(CustomAttribute, type(c_attr), CustomAttribute))
        attribs = self.custom_attributes()
        payload = {
            "action": "edit",
            "resources": [{
                "href": [c_attr for c_attr in attribs if c_attr.name == ca.name][-1].href,
                "value": ca.value
            } for ca in custom_attributes]}
        return self.appliance.rest_api.post(
            path.join(self.href(), 'custom_attributes'), **payload)

    def delete_custom_attributes(self, *custom_attributes):
        """Delete static custom attributes from the provider.

        Args:
            custom_attributes: The custom attributes to delete.
                               (Could also be names (str).)
        Returns:
            response.
        """
        names = []
        for c_attr in custom_attributes:
            attr_type = type(c_attr)
            if attr_type in (str, CustomAttribute):
                names.append(c_attr if attr_type is str else c_attr.name)
            else:
                raise TypeError('Type of arguments should be either '
                                'str or CustomAttribute. '
                                '({} not in [str, CustomAttribute])'
                                .format(type(c_attr)))
        attribs = self.custom_attributes()
        if not names:
            names = [attrib.name for attrib in attribs]
        payload = {
            "action": "delete",
            "resources": [{
                "href": attrib.href,
            } for attrib in attribs if attrib.name in names]}
        return self.appliance.rest_api.post(
            path.join(self.href(), 'custom_attributes'), **payload)

    def sync_ssl_certificate(self):
        """Sync the SSL certificate between CFME and OCP.

        Returns:
            None
        """
        def _copy_certificate():
            is_succeed = True
            try:
                # Copy certificate to the appliance
                provider_ssh.get_file("/etc/origin/master/ca.crt", "/tmp/ca.crt")
                appliance_ssh.put_file("/tmp/ca.crt",
                                       "/etc/pki/ca-trust/source/anchors/{crt}".format(
                                           crt=cert_name))
            except URLError:
                logger.debug("Failed to deploy certificate from OpenShift to CFME")
                is_succeed = False
            finally:
                return is_succeed

        provider_ssh = self.cli.ssh_client
        appliance_ssh = self.appliance.ssh_client()

        # Reconnect to the appliance in case of a dead connection
        if not appliance_ssh.connected:
            appliance_ssh.connect()

        # Check whether SSL is already configured between the appliance and the
        # provider by sending an HTTPS request (using SSL) from the appliance to
        # the provider, hiding the output and returning the exit code of the action
        _, stdout, stderr = \
            appliance_ssh.exec_command(
                "curl https://{provider}:8443 -sS > /dev/null;echo $?".format(
                    provider=self.provider_data.hostname))

        # Copy the certificate in case of failure (return code is not 0)
        if stdout.readline().replace('\n', "") != "0":
            cert_name = "{provider_name}.ca.crt".format(
                provider_name=self.provider_data.hostname.split(".")[0])
            wait_for(_copy_certificate, num_sec=600, delay=30,
                     message="Copy certificate from OCP to CFME")
            appliance_ssh.exec_command("update-ca-trust")

            # Restart evmserverd to apply the new SSL certificate
            self.appliance.evmserverd.restart()
            self.appliance.evmserverd.wait_for_running()
            self.appliance.wait_for_web_ui()

    def get_system_id(self):
        mgmt_systems_tbl = self.appliance.db.client['ext_management_systems']
        return self.appliance.db.client.session.query(mgmt_systems_tbl).filter(
            mgmt_systems_tbl.name == self.name).first().id

    def get_metrics(self, **kwargs):
        """Returns all the collected metrics for this provider.

        Args:
            filters: dict of column names and values,
                e.g. {"resource_type": "Container"}
            metrics_table: metrics table name; there are a few metrics tables,
                e.g. metrics, metric_rollups, etc.
        Returns:
            Query object with the relevant records
        """
        filters = kwargs.get("filters", {})
        metrics_table = kwargs.get("metrics_table", "metric_rollups")
        metrics_tbl = self.appliance.db.client[metrics_table]
        mgmt_system_id = self.get_system_id()

        logger.info("Getting metrics for {name} (parent_ems_id == {id})".format(
            name=self.name, id=mgmt_system_id))
        if filters:
            logger.info("Filtering by: {f}".format(f=filters))
        filters["parent_ems_id"] = mgmt_system_id
        return self.appliance.db.client.session.query(metrics_tbl).filter_by(**filters)

    def wait_for_collected_metrics(self, timeout="50m", table_name="metrics"):
        """Check the DB for gathered collection data.

        Args:
            timeout: timeout in minutes
            table_name: name of the metrics table to query
        Returns:
            Bool: whether the collected metrics count is greater than 0
        """
        def is_collected():
            metrics_count = self.get_metrics(metrics_table=table_name).count()
            logger.info("Current metrics found count is {count}".format(count=metrics_count))
            return metrics_count > 0

        logger.info("Monitoring DB for metrics collection")
        result = True
        try:
            wait_for(is_collected, timeout=timeout, delay=30)
        except TimedOutError:
            logger.error(
                'Timeout exceeded, no metrics found in MIQ DB for the provider "{name}"'.format(
                    name=self.name))
            result = False
        finally:
            return result

    def pause(self):
        """Pause the OCP provider.

        Returns:
            API response.
        """
        return self.appliance.rest_api.collections.providers.get(name=self.name).action.pause()

    def resume(self):
        """Resume the OCP provider.

        Returns:
            API response.
        """
        return self.appliance.rest_api.collections.providers.get(name=self.name).action.resume()
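A hedged usage sketch of the custom-attribute helpers defined above; `provider` is assumed to be an already set-up OpenshiftProvider instance, and the attribute name and values are arbitrary examples rather than anything from the repo:

# Illustrative round trip through the custom-attribute helpers above.
# Assumes `provider` is an OpenshiftProvider that has already been created.
ca = CustomAttribute('environment', 'staging')

provider.add_custom_attributes(ca)          # POST "add" to .../custom_attributes
current = provider.custom_attributes()      # read back; hrefs are now populated
assert any(attr.name == 'environment' for attr in current)

ca.value = 'production'
provider.edit_custom_attributes(ca)         # resolves the href by matching names

provider.delete_custom_attributes('environment')   # plain names (str) are accepted too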
import operator
from collections import namedtuple

import fauxfactory
import pytest

from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.containers.provider import ContainersProvider
from cfme.infrastructure.config_management import ConfigManager
from cfme.infrastructure.config_management import ConfigSystem
from cfme.infrastructure.provider import InfraProvider
from cfme.markers.env_markers.provider import ONE_PER_CATEGORY
from cfme.physical.provider import PhysicalProvider
from cfme.services.myservice import MyService
from cfme.services.workloads import TemplatesImages
from cfme.services.workloads import VmsInstances
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ

SearchParam = namedtuple("SearchParam",
                         ["collection", "destination", "entity", "filter", "my_filters"])

pytestmark = [
    pytest.mark.uncollectif(
        lambda param, appliance:
        (param.collection in [ConfigManager, 'ansible_tower_providers'] or
         param.filter == 'Job Template (Ansible Tower) : Name') or
        (appliance.version >= '5.11' and param.entity == 'network_load_balancers'),
        reason='load balancers are no longer supported in 5.11 -> BZ 1672949'),
    pytest.mark.meta(automates=[BZ(1402392)])  # should be only on test_filter_crud
]


def _navigation(param, appliance):
    if isinstance(param.collection, str):
        view = navigate_to(getattr(appliance.collections, param.collection), param.destination)
    else:
        view = navigate_to(param.collection, param.destination)
    return view


def _filter_displayed(filters, filter):
    if filters.is_displayed:
        assert filter, "Filter wasn't created!"
    else:
        pytest.fail("Filter wasn't created or filters tree is not displayed!")


def _select_filter(filters, filter_name, param):
    if param.my_filters:
        if isinstance(param.my_filters, tuple):
            filters.tree.click_path(param.my_filters[1], "My Filters", filter_name)
        else:
            filters.tree.click_path("My Filters", filter_name)
    else:
        filters.navigation.select(filter_name)


def _can_open_advanced_search(param, appliance):
    """
    Polarion:
        assignee: anikifor
        casecomponent: WebUI
        caseimportance: critical
        initialEstimate: 1/10h
    """
    view = _navigation(param, appliance)
    assert view.search.is_advanced_search_possible, (f"Advanced search not displayed "
                                                     f"for {param.entity} "
                                                     f"on {param.destination.lower()}")
    view.search.open_advanced_search()
    assert view.search.is_advanced_search_opened, (f"Advanced search failed to open "
                                                   f"for {param.entity} "
                                                   f"on {param.destination.lower()}")
    view.search.close_advanced_search()
    assert not view.search.is_advanced_search_opened, (f"Advanced search failed to close "
                                                       f"for {param.entity} "
                                                       f"on {param.destination.lower()}")


def _filter_crud(param, appliance):
    """
    Polarion:
        assignee: anikifor
        casecomponent: WebUI
        caseimportance: high
        initialEstimate: 1/10h
    """
    filter_name = fauxfactory.gen_string('alphanumeric', 10)
    filter_value = fauxfactory.gen_string('alphanumeric', 10)
    filter_value_updated = fauxfactory.gen_string('alphanumeric', 10)
    view = _navigation(param, appliance)

    # create
    if ':' not in param.filter:
        # to test "Count of" field, values don't contain ':'
        filter_value = fauxfactory.gen_numeric_string(3)
        filter_value_updated = fauxfactory.gen_numeric_string(3)
        view.search.save_filter(
            "fill_count({}, =, {})".format(param.filter, filter_value), filter_name)
    else:
        view.search.save_filter(
            "fill_field({}, =, {})".format(param.filter, filter_value), filter_name)
    view.search.close_advanced_search()
    view.flash.assert_no_error()

    # read
    if param.my_filters:
        if isinstance(param.my_filters, tuple):
            filters = operator.attrgetter(param.my_filters[0])(view)
            _filter_displayed(filters, filters.tree.has_path(param.my_filters[1],
                                                             "My Filters", filter_name))
        else:
            filters = operator.attrgetter(param.my_filters)(view)
            _filter_displayed(filters, filters.tree.has_path("My Filters", filter_name))
    else:
        filters = view.my_filters
        _filter_displayed(filters, filters.navigation.has_item(filter_name))

    # update
    _select_filter(filters, filter_name, param)
    view.search.open_advanced_search()
    view.search.advanced_search_form.search_exp_editor.select_first_expression()
    if ':' not in param.filter:  # to test "Count of" field
        view.search.advanced_search_form.search_exp_editor.fill_count(
            count=param.filter, key='=', value=filter_value_updated)
    else:
        view.search.advanced_search_form.search_exp_editor.fill_field(
            field=param.filter, key='=', value=filter_value_updated)
    # save expression
    view.search.advanced_search_form.save_filter_button.click()
    # save filter
    view.search.advanced_search_form.save_filter_button.click()
    view.search.close_advanced_search()
    _select_filter(filters, filter_name, param)

    # read after update
    view.search.open_advanced_search()
    exp_text = view.search.advanced_search_form.search_exp_editor.expression_text
    assert filter_value_updated in exp_text, "Filter wasn't changed!"

    # delete
    view.search.delete_filter()
    view.search.close_advanced_search()
    if param.my_filters:
        if filters.is_displayed:
            if isinstance(param.my_filters, tuple):
                assert not filters.tree.has_path(param.my_filters[1], "My Filters",
                                                 filter_name), "Filter wasn't deleted!"
            else:
                assert not filters.tree.has_path("My Filters",
                                                 filter_name), "Filter wasn't deleted!"
    else:
        if view.my_filters.is_displayed:
            assert not view.my_filters.navigation.has_item(filter_name), "Filter wasn't deleted!"
_tests = [_can_open_advanced_search, _filter_crud]


def methodized(metafunc):
    """Transform function to method by adding self argument

    works just for specific functions in this file, would be nice to generalize
    TODO generalize for more tests with possibly different arguments
    """
    def func(self, param, appliance):
        return metafunc(param, appliance)
    func.__doc__ = metafunc.__doc__
    return func


def inject_tests(metaclass):
    """Attach tests to decorated class

    uses _tests - list of test functions
    """
    for test in _tests:
        method = methodized(test)
        setattr(metaclass, f"test{test.__name__}", method)
    return metaclass


def base_pytestmarks(param_values, setup_prov=False):
    return [
        test_requirements.filtering,
        pytest.mark.parametrize(
            'param', param_values,
            ids=['{}-{}'.format(param.entity, param.destination.lower())
                 for param in param_values],
            scope="class"
        )] + ([pytest.mark.usefixtures("setup_provider")] if setup_prov else [])


@inject_tests
@pytest.mark.provider([CloudProvider], selector=ONE_PER_CATEGORY)
class TestCloud(object):
    params_values = [
        SearchParam('cloud_providers', 'All', 'cloudprovider', 'Cloud Provider : Name', None),
        SearchParam('cloud_av_zones', 'All', 'availabilityzone', 'Availability Zone : Name', None),
        SearchParam('cloud_host_aggregates', 'All', 'hostaggregate',
                    'Host Aggregate : Name', None),
        SearchParam('cloud_tenants', 'All', 'tenant', 'Cloud Tenant : Name', None),
        SearchParam('cloud_flavors', 'All', 'flavor', 'Flavor : Name', None),
        SearchParam('cloud_instances', 'All', 'instances', 'Instance : Name',
                    ('sidebar.instances', "All Instances")),
        SearchParam('cloud_images', 'All', 'images', 'Image : Name',
                    ('sidebar.images', "All Images")),
        SearchParam('cloud_stacks', 'All', 'orchestration_stacks',
                    'Orchestration Stack : Name', None),
        SearchParam('cloud_keypairs', 'All', 'key_pairs', 'Key Pair : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([CloudProvider], selector=ONE_PER_CATEGORY)
class TestNetwork(object):
    params_values = [
        SearchParam('network_providers', 'All', 'network_managers',
                    'Network Manager : Name', None),
        SearchParam('cloud_networks', 'All', 'network_networks', 'Cloud Network : Name', None),
        SearchParam('network_subnets', 'All', 'network_subnets', 'Cloud Subnet : Name', None),
        SearchParam('network_routers', 'All', 'network_routers', 'Network Router : Name', None),
        SearchParam('network_security_groups', 'All', 'network_security_groups',
                    'Security Group : Name', None),
        SearchParam('network_floating_ips', 'All', 'network_floating_ips',
                    'Floating IP : Address', None),
        SearchParam('network_ports', 'All', 'network_ports', 'Network Port : Name', None),
        SearchParam('balancers', 'All', 'network_load_balancers', 'Load Balancer : Name', None)]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([InfraProvider], selector=ONE_PER_CATEGORY)
class TestInfra(object):
    params_values = [
        SearchParam('infra_providers', 'All', 'infraproviders',
                    'Infrastructure Provider : Name', None),
        SearchParam('clusters', 'All', 'clusters', 'Cluster / Deployment Role : Name', None),
        SearchParam('hosts', 'All', 'hosts', 'Host / Node : Name', None),
        SearchParam('hosts', 'All', 'hosts', 'Host / Node.VMs', None),
        SearchParam('infra_vms', 'VMsOnly', 'vms', 'Virtual Machine : Name',
                    ('sidebar.vms', "All VMs")),
        SearchParam('infra_templates', 'TemplatesOnly', 'templates', 'Template : Name',
                    ('sidebar.templates', "All Templates")),
        SearchParam('resource_pools', 'All', 'resource_pools', 'Resource Pool : Name', None),
        SearchParam('datastores', 'All', 'datastores', 'Datastore : Name',
                    ('sidebar.datastores', "All Datastores")),
        SearchParam(VmsInstances, 'All', 'workloads_vms', 'VM and Instance : Name',
                    ('vms', "All VMs & Instances")),
        SearchParam(TemplatesImages, 'All', 'workloads_templates',
                    'VM Template and Image : Name', ('templates', "All Templates & Images")),
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([PhysicalProvider], selector=ONE_PER_CATEGORY)
class TestPhysical(object):
    params_values = [
        SearchParam('physical_providers', 'All', 'physical_providers',
                    'Physical Infrastructure Provider : Name', None),
        SearchParam('physical_servers', 'All', 'physical_servers',
                    'Physical Server : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
@pytest.mark.provider([ContainersProvider], selector=ONE_PER_CATEGORY)
class TestContainers(object):
    params_values = [
        SearchParam('containers_providers', 'All', 'container_providers',
                    'Containers Provider : Name', None),
        SearchParam('container_projects', 'All', 'container_projects',
                    'Container Project : Name', None),
        SearchParam('container_routes', 'All', 'container_routes',
                    'Container Route : Name', None),
        SearchParam('container_services', 'All', 'container_services',
                    'Container Service : Name', None),
        SearchParam('container_replicators', 'All', 'container_replicators',
                    'Container Replicator : Name', None),
        SearchParam('container_pods', 'All', 'container_pods', 'Container Pod : Name', None),
        SearchParam('containers', 'All', 'containers', 'Container : Name', None),
        SearchParam('container_nodes', 'All', 'container_nodes', 'Container Node : Name', None),
        SearchParam('container_volumes', 'All', 'container_volumes',
                    'Persistent Volume : Name', None),
        SearchParam('container_builds', 'All', 'container_builds',
                    'Container Build : Name', None),
        SearchParam('container_image_registries', 'All', 'image_registries',
                    'Container Image Registry : Name', None),
        SearchParam('container_images', 'All', 'container_images',
                    'Container Image : Name', None),
        SearchParam('container_templates', 'All', 'container_templates',
                    'Container Template : Name', None)
    ]
    pytestmark = base_pytestmarks(params_values, True)


@inject_tests
class TestAnsibleTower(object):
    params_values = [
        SearchParam('ansible_tower_providers', 'All', 'ansible_tower_explorer_provider',
                    'Automation Manager (Ansible Tower) : Name',
                    ('sidebar.providers', 'All Ansible Tower Providers')),
        SearchParam('ansible_tower_systems', 'All', 'ansible_tower_explorer_system',
                    'Configured System (Ansible Tower) : Hostname',
                    ('sidebar.configured_systems', 'All Ansible Tower Configured Systems')),
        SearchParam('ansible_tower_job_templates', 'All', 'ansible_tower_explorer_job_templates',
                    'Job Template (Ansible Tower) : Name',
                    ('sidebar.job_templates', 'All Ansible Tower Job Templates')),
        SearchParam('ansible_tower_jobs', 'All', 'ansible_tower_jobs',
                    'Ansible Tower Job : Name', None)]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
class TestStorage(object):
    params_values = [
        SearchParam('volumes', 'All', 'block_store_volumes', 'Cloud Volume : Name', None),
        SearchParam('volume_snapshots', 'All', 'block_store_snapshots',
                    'Cloud Volume Snapshot : Name', None),
        SearchParam('volume_backups', 'All', 'block_store_backups',
                    'Cloud Volume Backup : Name', None),
        SearchParam('object_store_containers', 'All', 'object_store_containers',
                    'Cloud Object Store Container : Name', None),
        SearchParam('object_store_objects', 'All', 'object_store_objects',
                    'Cloud Object Store Object : Name', None),
    ]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
class TestConfigManagement(object):
    params_values = [
        SearchParam(ConfigManager, 'All', 'configuration_management',
                    'Configuration Manager : Name',
                    ('sidebar.providers', "All Configuration Management Providers")),
        SearchParam(ConfigSystem, 'All', 'configuration_management_systems',
                    'Configured System (Red Hat Satellite) : Hostname',
                    ('sidebar.configured_systems', "All Configured Systems")),
    ]
    pytestmark = base_pytestmarks(params_values)


@inject_tests
@pytest.mark.meta(blockers=[BZ(1733489)])
class TestServices(object):
    params_values = [SearchParam(MyService, 'All', 'myservices', 'Service : Name', 'myservice')]
    pytestmark = base_pytestmarks(params_values)
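For reference, a hedged sketch of the test ids that base_pytestmarks' class-scoped parametrization generates; the two example entries mirror TestStorage above and everything else is illustrative:

# Sketch: ids produced by the parametrize call in base_pytestmarks for TestStorage.
params = [
    ('block_store_volumes', 'All'),
    ('block_store_snapshots', 'All'),
]
ids = ['{}-{}'.format(entity, destination.lower()) for entity, destination in params]
assert ids == ['block_store_volumes-all', 'block_store_snapshots-all']
# pytest then collects each injected method once per id, e.g.:
#   TestStorage::test_can_open_advanced_search[block_store_volumes-all]
#   TestStorage::test_filter_crud[block_store_volumes-all]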
izapolsk/integration_tests
cfme/tests/webui/test_advanced_search.py
cfme/containers/provider/openshift.py